DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
-if USE_GCONFTOOL
-GCONF_DIR = gconf
-else
-GCONF_DIR =
-endif
-
ALWAYS_SUBDIRS = \
gst sys ext \
tests \
m4 \
pkgconfig
-SUBDIRS = \
- $(ALWAYS_SUBDIRS) \
- $(GCONF_DIR)
-
-DIST_SUBDIRS = \
- $(ALWAYS_SUBDIRS) \
- gconf
+SUBDIRS = $(ALWAYS_SUBDIRS)
+DIST_SUBDIRS = $(ALWAYS_SUBDIRS)
# include before EXTRA_DIST for win32 assignment
include $(top_srcdir)/common/win32.mak
Plugins: cdio (cdiocddasrc)
URL: http://www.gnu.org/software/libcdio/
-Package: ESound
-Version: >= 0.2.12
-Plugins: esdsink
-URL: http://www.gnome.org/
-
Package: FLAC
Version: == 1.1.2
Plugins: flac (flacenc, flacdec)
URL: http://flac.sourceforge.net/
-Package: GConf
-Version: >= 2.0
-Plugins: gconfelements (gconfvideosink, gconfvideosrc, gconfaudiosink,
- gconfaudiosrc)
-URL: http://www.gnome.org/
-
Package: HAL
Version: >= 0.5.6
Plugins: halelements (halaudiosink, halaudiosrc)
-Subproject commit 17fa4abf49d31cf5dcc2994bdbaa86e45a3fb69f
+Subproject commit 7fda5249ab56f0de09277df330780a3b90a2b306
dnl initialize autoconf
dnl releases only do -Wall, git and prerelease does -Werror too
dnl use a three digit version number for releases, and four for git/pre
-AC_INIT(GStreamer Good Plug-ins, 0.10.30.1,
+AC_INIT(GStreamer Good Plug-ins, 0.11.0.1,
http://bugzilla.gnome.org/enter_bug.cgi?product=GStreamer,
gst-plugins-good)
dnl our libraries and install dirs use major.minor as a version
GST_MAJORMINOR=$PACKAGE_VERSION_MAJOR.$PACKAGE_VERSION_MINOR
dnl we override it here if we need to for the release candidate of new series
-GST_MAJORMINOR=0.10
+GST_MAJORMINOR=0.11
AC_SUBST(GST_MAJORMINOR)
AG_GST_LIBTOOL_PREPARE
AM_PROG_LIBTOOL
dnl *** required versions of GStreamer stuff ***
-GST_REQ=0.10.35.1
-GSTPB_REQ=0.10.35.1
+GST_REQ=0.11.0
+GSTPB_REQ=0.11.0
dnl *** autotools stuff ****
AC_PATH_PROG(VALGRIND_PATH, valgrind, no)
AM_CONDITIONAL(HAVE_VALGRIND, test ! "x$VALGRIND_PATH" = "xno")
-dnl check for gconftool-2
-dnl this macro defines an am conditional, so it needs to be run always
-AM_GCONF_SOURCE_2
-
dnl check for documentation tools
GTK_DOC_CHECK([1.3])
AS_PATH_PYTHON([2.1])
dnl used by ext/wavpack
AX_CREATE_STDINT_H
-dnl used in gst/rtp/gstasteriskh263.c
-AC_CHECK_HEADERS([netinet/in.h])
-AC_CHECK_HEADERS([winsock2.h], HAVE_WINSOCK2_H=yes)
-AM_CONDITIONAL(HAVE_WINSOCK2_H, test "x$HAVE_WINSOCK2_H" = "xyes")
-
dnl used in gst/udp
AC_CHECK_HEADERS([sys/time.h])
dnl *** checks for dependency libraries ***
-dnl GLib is required
-AG_GST_GLIB_CHECK([2.24])
-PKG_CHECK_MODULES(GIO, [ gio-2.0 >= 2.20 ], , AC_MSG_ERROR([gio is required]))
+
+dnl GLib
+GLIB_REQ=2.31.10
+AG_GST_GLIB_CHECK([$GLIB_REQ])
dnl Orc
ORC_CHECK([0.4.11])
dnl uninstalled is selected preferentially -- see pkg-config(1)
AG_GST_CHECK_GST($GST_MAJORMINOR, [$GST_REQ], yes)
AG_GST_CHECK_GST_BASE($GST_MAJORMINOR, [$GST_REQ], yes)
-AG_GST_CHECK_GST_GDP($GST_MAJORMINOR, [$GST_REQ], yes)
AG_GST_CHECK_GST_CONTROLLER($GST_MAJORMINOR, [$GST_REQ], yes)
+AG_GST_CHECK_GST_NET($GST_MAJORMINOR, [$GST_REQ], yes)
AG_GST_CHECK_GST_CHECK($GST_MAJORMINOR, [$GST_REQ], no)
AG_GST_CHECK_GST_PLUGINS_BASE($GST_MAJORMINOR, [$GSTPB_REQ], yes)
AM_CONDITIONAL(HAVE_GTK, test "x$HAVE_GTK" = "xyes")
AM_CONDITIONAL(HAVE_GTK_X11, test "x$HAVE_GTK_X11" = "xyes")
-dnl should we install schemas ?
-translit(dnm, m, l) AM_CONDITIONAL(USE_GCONFTOOL, true)
-AG_GST_CHECK_FEATURE(GCONFTOOL, [GConf schemas], , [
- AC_PATH_PROG(GCONFTOOL, gconftool-2, no)
- if test x$GCONFTOOL = xno; then
- AC_MSG_WARN(Not installing GConf schemas)
- HAVE_GCONFTOOL="no"
- else
- HAVE_GCONFTOOL="yes"
- fi
- AC_SUBST(HAVE_GCONFTOOL)
-])
-
dnl *** set variables based on configure arguments ***
dnl set license and copyright notice
dnl *** plug-ins to include ***
+dnl Non ported plugins (non-dependant, then dependant)
+dnl Make sure you have a space before and after all plugins
+GST_PLUGINS_NONPORTED="deinterlace interleave flx goom2k1 \
+ imagefreeze monoscope smpte \
+ videobox \
+ cairo cairo_gobject dv1394 gdk_pixbuf \
+ oss oss4 shout2 \
+ wavpack \
+ osx_video osx_audio "
+AC_SUBST(GST_PLUGINS_NONPORTED)
+
dnl these are all the gst plug-ins, compilable without additional libs
dnl videofilter is at the top because others depend on it
-AG_GST_CHECK_PLUGIN(videofilter)
AG_GST_CHECK_PLUGIN(alpha)
AG_GST_CHECK_PLUGIN(apetag)
AG_GST_CHECK_PLUGIN(audiofx)
AG_GST_CHECK_PLUGIN(effectv)
AG_GST_CHECK_PLUGIN(equalizer)
AG_GST_CHECK_PLUGIN(flv)
-AG_GST_CHECK_PLUGIN(id3demux)
-AG_GST_CHECK_PLUGIN(icydemux)
-AG_GST_CHECK_PLUGIN(interleave)
AG_GST_CHECK_PLUGIN(flx)
AG_GST_CHECK_PLUGIN(goom)
AG_GST_CHECK_PLUGIN(goom2k1)
+AG_GST_CHECK_PLUGIN(icydemux)
+AG_GST_CHECK_PLUGIN(id3demux)
AG_GST_CHECK_PLUGIN(imagefreeze)
+AG_GST_CHECK_PLUGIN(interleave)
AG_GST_CHECK_PLUGIN(isomp4)
AG_GST_CHECK_PLUGIN(law)
AG_GST_CHECK_PLUGIN(level)
AG_GST_CHECK_PLUGIN(udp)
AG_GST_CHECK_PLUGIN(videobox)
AG_GST_CHECK_PLUGIN(videocrop)
+AG_GST_CHECK_PLUGIN(videofilter)
AG_GST_CHECK_PLUGIN(videomixer)
AG_GST_CHECK_PLUGIN(wavenc)
AG_GST_CHECK_PLUGIN(wavparse)
AG_GST_CHECK_PLUGIN(y4m)
-dnl *** checks for socket and nsl libraries ***
-AC_CHECK_FUNC(socket,,[AC_CHECK_LIB(socket,socket)])
-
-dnl disable gst plugins we might not be able to build on this
-dnl platform: udp and rtsp (ugly but minimally invasive)
-dnl FIXME: maybe move to sys
-AC_CHECK_HEADERS([sys/socket.h], HAVE_SYS_SOCKET_H=yes)
-AC_CHECK_HEADERS([winsock2.h], HAVE_WINSOCK2_H=yes)
-
-if test "x$HAVE_SYS_SOCKET_H" != "xyes" -a "x$HAVE_WINSOCK2_H" != "xyes"; then
- AG_GST_DISABLE_PLUGIN(udp)
- AG_GST_DISABLE_PLUGIN(rtsp)
-fi
-
-if test "x$HAVE_WINSOCK2_H" = "xyes"; then
- WIN32_LIBS="-lws2_32"
- AC_SUBST(WIN32_LIBS)
-fi
-
dnl disable experimental plug-ins
if test "x$BUILD_EXPERIMENTAL" != "xyes"; then
AG_GST_DISABLE_PLUGIN(monoscope)
AG_GST_PKG_CHECK_MODULES(CAIRO_GOBJECT, cairo-gobject >= 1.10.0)
])
-dnl **** ESound ****
-translit(dnm, m, l) AM_CONDITIONAL(USE_ESD, true)
-AG_GST_CHECK_FEATURE(ESD, [ESounD sound daemon], esdsink, [
- AG_GST_PKG_CHECK_MODULES(ESD, esound >= 0.2.12)
- if test $HAVE_ESD = no
- then
- AM_PATH_ESD(0.2.12, HAVE_ESD="yes")
- AS_SCRUB_INCLUDE(ESD_CFLAGS)
- fi
-])
-
dnl *** FLAC ***
translit(dnm, m, l) AM_CONDITIONAL(USE_FLAC, true)
AG_GST_CHECK_FEATURE(FLAC, [FLAC lossless audio], flac, [
AG_GST_PKG_CHECK_MODULES(FLAC, flac >= 1.1.4)
])
-dnl *** GConf ***
-translit(dnm, m, l) AM_CONDITIONAL(USE_GCONF, true)
-AG_GST_CHECK_FEATURE(GCONF, [GConf libraries], gconfelements, [
- AG_GST_PKG_CHECK_MODULES(GCONF, gconf-2.0)
-])
-
dnl *** GDK pixbuf ***
translit(dnm, m, l) AM_CONDITIONAL(USE_GDK_PIXBUF, true)
AG_GST_CHECK_FEATURE(GDK_PIXBUF, [GDK pixbuf], gdkpixbuf, [
AG_GST_PKG_CHECK_MODULES(GDK_PIXBUF, gdk-pixbuf-2.0 >= 2.8.0)
])
-dnl *** HAL ***
-translit(dnm, m, l) AM_CONDITIONAL(USE_HAL, true)
-AG_GST_CHECK_FEATURE(HAL, [HAL libraries], halelements, [
- AG_GST_PKG_CHECK_MODULES(HAL, [hal >= 0.5.6, dbus-1 >= 0.32])
-])
-
dnl *** Jack ***
translit(dnm, m, l) AM_CONDITIONAL(USE_JACK, true)
AG_GST_CHECK_FEATURE(JACK, Jack, jack, [
dnl used in ext/pulse/pulseutil.c
AC_CHECK_HEADERS([process.h])
- AG_GST_PKG_CHECK_MODULES(PULSE, libpulse >= 0.9.16)
- AG_GST_PKG_CHECK_MODULES(PULSE_0_9_20, libpulse >= 0.9.20)
- if test x$HAVE_PULSE_0_9_20 = xyes; then
- AC_DEFINE(HAVE_PULSE_0_9_20, 1, [defined if pulseaudio >= 0.9.20 is available])
- fi
- AG_GST_PKG_CHECK_MODULES(PULSE_1_0, libpulse >= 0.98)
- if test x$HAVE_PULSE_1_0 = xyes; then
- AC_DEFINE(HAVE_PULSE_1_0, 1, [defined if pulseaudio >= 1.0 is available])
- fi
+ AG_GST_PKG_CHECK_MODULES(PULSE, libpulse >= 1.0)
])
dnl *** dv1394 ***
AM_CONDITIONAL(USE_CAIRO_GOBJECT, false)
AM_CONDITIONAL(USE_DIRECTSOUND, false)
AM_CONDITIONAL(USE_DV1394, false)
-AM_CONDITIONAL(USE_ESD, false)
AM_CONDITIONAL(USE_FLAC, false)
-AM_CONDITIONAL(USE_GCONF, false)
-AM_CONDITIONAL(USE_GCONFTOOL, false)
AM_CONDITIONAL(USE_GDK_PIXBUF, false)
AM_CONDITIONAL(USE_GST_V4L2, false)
-AM_CONDITIONAL(USE_HAL, false)
AM_CONDITIONAL(USE_JACK, false)
AM_CONDITIONAL(USE_JPEG, false)
AM_CONDITIONAL(USE_LIBCACA, false)
dnl prefer internal headers to already installed ones
dnl also add builddir include for enumtypes and marshal
dnl add ERROR_CFLAGS, but overridable
+GST_CFLAGS="$GST_CFLAGS -DGST_USE_UNSTABLE_API"
GST_CXXFLAGS="-I\$(top_srcdir)/gst-libs $GST_CFLAGS $GLIB_EXTRA_CFLAGS \$(GST_OPTION_CXXFLAGS)"
GST_CFLAGS="-I\$(top_srcdir)/gst-libs $GST_CFLAGS $GLIB_EXTRA_CFLAGS \$(GST_OPTION_CFLAGS)"
AC_SUBST(GST_CFLAGS)
ext/annodex/Makefile
ext/cairo/Makefile
ext/dv/Makefile
-ext/esd/Makefile
ext/flac/Makefile
-ext/gconf/Makefile
ext/gdk_pixbuf/Makefile
-ext/hal/Makefile
ext/jack/Makefile
ext/jpeg/Makefile
ext/libcaca/Makefile
tests/examples/v4l2/Makefile
tests/files/Makefile
tests/icles/Makefile
-gconf/Makefile
-gconf/gstreamer.schemas
common/Makefile
common/m4/Makefile
m4/Makefile
-e 's/.* HAVE_SYS_STAT_H$/#define HAVE_SYS_STAT_H 1/' \
-e 's/.* HAVE_SYS_TYPES_H$/#define HAVE_SYS_TYPES_H 1/' \
-e 's/.* HAVE_WIN32$/#define HAVE_WIN32 1/' \
- -e 's/.* HAVE_WINSOCK2_H$/#define HAVE_WINSOCK2_H 1/' \
-e 's/.* HOST_CPU$/#define HOST_CPU "i686"/' \
-e 's/.* LIBDIR$/#ifdef _DEBUG\n# define LIBDIR PREFIX "\\\\debug\\\\lib"\n#else\n# define LIBDIR PREFIX "\\\\lib"\n#endif/' \
-e 's/.* LOCALEDIR$/#define LOCALEDIR PREFIX "\\\\share\\\\locale"/' \
-e "s/.* PACKAGE_STRING$/#define PACKAGE_STRING \"$PACKAGE_STRING\"/" \
-e 's/.* PACKAGE_TARNAME$/#define PACKAGE_TARNAME "'$PACKAGE_TARNAME'"/' \
-e 's/.* PACKAGE_VERSION$/#define PACKAGE_VERSION "'$PACKAGE_VERSION'"/' \
- -e 's/.* PLUGINDIR$/#ifdef _DEBUG\n# define PLUGINDIR PREFIX "\\\\debug\\\\lib\\\\gstreamer-0.10"\n#else\n# define PLUGINDIR PREFIX "\\\\lib\\\\gstreamer-0.10"\n#endif/' \
+ -e 's/.* PLUGINDIR$/#ifdef _DEBUG\n# define PLUGINDIR PREFIX "\\\\debug\\\\lib\\\\gstreamer-0.11"\n#else\n# define PLUGINDIR PREFIX "\\\\lib\\\\gstreamer-0.11"\n#endif/' \
-e 's/.* USE_BINARY_REGISTRY$/#define USE_BINARY_REGISTRY/' \
-e 's/.* VERSION$/#define VERSION "'$VERSION'"/' \
-e "s/.* DEFAULT_AUDIOSINK$/#define DEFAULT_AUDIOSINK \"directsoundsink\"/" \
$(top_srcdir)/ext/cairo/gstcairooverlay.h \
$(top_srcdir)/ext/dv/gstdvdec.h \
$(top_srcdir)/ext/dv/gstdvdemux.h \
- $(top_srcdir)/ext/esd/esdsink.h \
$(top_srcdir)/ext/flac/gstflacdec.h \
$(top_srcdir)/ext/flac/gstflacenc.h \
$(top_srcdir)/ext/flac/gstflactag.h \
- $(top_srcdir)/ext/gconf/gstgconfaudiosrc.h \
- $(top_srcdir)/ext/gconf/gstgconfaudiosink.h \
- $(top_srcdir)/ext/gconf/gstgconfvideosrc.h \
- $(top_srcdir)/ext/gconf/gstgconfvideosink.h \
$(top_srcdir)/ext/gdk_pixbuf/gstgdkpixbufsink.h \
- $(top_srcdir)/ext/hal/gsthalaudiosink.h \
- $(top_srcdir)/ext/hal/gsthalaudiosrc.h \
$(top_srcdir)/ext/jack/gstjackaudiosrc.h \
$(top_srcdir)/ext/jack/gstjackaudiosink.h \
$(top_srcdir)/ext/jpeg/gstjpegdec.h \
$(top_srcdir)/gst/videofilter/gstgamma.h \
$(top_srcdir)/gst/videofilter/gstvideobalance.h \
$(top_srcdir)/gst/videofilter/gstvideoflip.h \
- $(top_srcdir)/gst/videomixer/videomixer.h \
- $(top_srcdir)/gst/videomixer/videomixerpad.h \
$(top_srcdir)/gst/videomixer/videomixer2.h \
$(top_srcdir)/gst/videomixer/videomixer2pad.h \
$(top_srcdir)/gst/wavenc/gstwavenc.h \
<xi:include href="xml/element-equalizer-10bands.xml" />
<xi:include href="xml/element-equalizer-3bands.xml" />
<xi:include href="xml/element-equalizer-nbands.xml" />
- <xi:include href="xml/element-esdsink.xml" />
<xi:include href="xml/element-flacdec.xml" />
<xi:include href="xml/element-flacenc.xml" />
<xi:include href="xml/element-flacparse.xml" />
<xi:include href="xml/element-flvmux.xml" />
<xi:include href="xml/element-flxdec.xml" />
<xi:include href="xml/element-gamma.xml" />
- <xi:include href="xml/element-gconfaudiosrc.xml" />
- <xi:include href="xml/element-gconfaudiosink.xml" />
- <xi:include href="xml/element-gconfvideosrc.xml" />
- <xi:include href="xml/element-gconfvideosink.xml" />
<xi:include href="xml/element-gdkpixbufsink.xml" />
<xi:include href="xml/element-goom.xml" />
<xi:include href="xml/element-goom2k1.xml" />
- <xi:include href="xml/element-gstrtpbin.xml" />
- <xi:include href="xml/element-gstrtpjitterbuffer.xml" />
- <xi:include href="xml/element-gstrtpptdemux.xml" />
- <xi:include href="xml/element-gstrtpsession.xml" />
- <xi:include href="xml/element-gstrtpssrcdemux.xml" />
<xi:include href="xml/element-halaudiosink.xml" />
<xi:include href="xml/element-halaudiosrc.xml" />
<xi:include href="xml/element-hdv1394src.xml" />
<xi:include href="xml/element-rtpj2kpay.xml" />
<xi:include href="xml/element-rtpjpegpay.xml" />
<xi:include href="xml/element-rtspsrc.xml" />
+ <xi:include href="xml/element-rtpbin.xml" />
+ <xi:include href="xml/element-rtpjitterbuffer.xml" />
+ <xi:include href="xml/element-rtpptdemux.xml" />
+ <xi:include href="xml/element-rtpsession.xml" />
+ <xi:include href="xml/element-rtpssrcdemux.xml" />
<xi:include href="xml/element-shagadelictv.xml" />
<xi:include href="xml/element-shapewipe.xml" />
<xi:include href="xml/element-smokedec.xml" />
<xi:include href="xml/plugin-efence.xml" />
<xi:include href="xml/plugin-equalizer.xml" />
<xi:include href="xml/plugin-effectv.xml" />
- <xi:include href="xml/plugin-esdsink.xml" />
<xi:include href="xml/plugin-flac.xml" />
<xi:include href="xml/plugin-flv.xml" />
<xi:include href="xml/plugin-flxdec.xml" />
- <xi:include href="xml/plugin-gconfelements.xml" />
<xi:include href="xml/plugin-gdkpixbuf.xml" />
<xi:include href="xml/plugin-goom.xml" />
<xi:include href="xml/plugin-goom2k1.xml" />
<xi:include href="xml/plugin-pulseaudio.xml" />
<xi:include href="xml/plugin-replaygain.xml" />
<xi:include href="xml/plugin-rtp.xml" />
- <xi:include href="xml/plugin-gstrtpmanager.xml" />
+ <xi:include href="xml/plugin-rtpmanager.xml" />
<xi:include href="xml/plugin-rtsp.xml" />
<xi:include href="xml/plugin-shapewipe.xml" />
<xi:include href="xml/plugin-shout2send.xml" />
</SECTION>
<SECTION>
-<FILE>element-esdsink</FILE>
-<TITLE>esdsink</TITLE>
-GstEsdSink
-<SUBSECTION Standard>
-GstEsdSinkClass
-GST_TYPE_ESDSINK
-GST_ESDSINK
-GST_ESDSINK_CLASS
-GST_IS_ESDSINK
-GST_IS_ESDSINK_CLASS
-gst_esdsink_get_type
-</SECTION>
-
-<SECTION>
<FILE>element-flacdec</FILE>
<TITLE>flacdec</TITLE>
GstFlacDec
</SECTION>
<SECTION>
-<FILE>element-gconfaudiosrc</FILE>
-<TITLE>gconfaudiosrc</TITLE>
-GstGConfAudioSrc
-<SUBSECTION Standard>
-GstGConfAudioSrcClass
-GST_GCONF_AUDIO_SRC
-GST_IS_GCONF_AUDIO_SRC
-GST_TYPE_GCONF_AUDIO_SRC
-GST_GCONF_AUDIO_SRC_CLASS
-GST_IS_GCONF_AUDIO_SRC_CLASS
-gst_gconf_audio_src_get_type
-</SECTION>
-
-<SECTION>
-<FILE>element-gconfaudiosink</FILE>
-<TITLE>gconfaudiosink</TITLE>
-GstGConfAudioSink
-<SUBSECTION Standard>
-GstGConfAudioSinkClass
-GST_GCONF_AUDIO_SINK
-GST_IS_GCONF_AUDIO_SINK
-GST_TYPE_GCONF_AUDIO_SINK
-GST_GCONF_AUDIO_SINK_CLASS
-GST_IS_GCONF_AUDIO_SINK_CLASS
-gst_gconf_audio_sink_get_type
-</SECTION>
-
-<SECTION>
-<FILE>element-gconfvideosrc</FILE>
-<TITLE>gconfvideosrc</TITLE>
-GstGConfVideoSrc
-<SUBSECTION Standard>
-GstGConfVideoSrcClass
-GST_GCONF_VIDEO_SRC
-GST_IS_GCONF_VIDEO_SRC
-GST_TYPE_GCONF_VIDEO_SRC
-GST_GCONF_VIDEO_SRC_CLASS
-GST_IS_GCONF_VIDEO_SRC_CLASS
-gst_gconf_video_src_get_type
-</SECTION>
-
-<SECTION>
-<FILE>element-gconfvideosink</FILE>
-<TITLE>gconfvideosink</TITLE>
-GstGConfVideoSink
-<SUBSECTION Standard>
-GstGConfVideoSinkClass
-GST_GCONF_VIDEO_SINK
-GST_IS_GCONF_VIDEO_SINK
-GST_TYPE_GCONF_VIDEO_SINK
-GST_GCONF_VIDEO_SINK_CLASS
-GST_IS_GCONF_VIDEO_SINK_CLASS
-gst_gconf_video_sink_get_type
-</SECTION>
-
-<SECTION>
<FILE>element-gdkpixbufsink</FILE>
<TITLE>gdkpixbufsink</TITLE>
GstGdkPixbufSink
</SECTION>
<SECTION>
-<FILE>element-gstrtpbin</FILE>
-<TITLE>gstrtpbin</TITLE>
+<FILE>element-rtpbin</FILE>
+<TITLE>rtpbin</TITLE>
GstRtpBin
<SUBSECTION Standard>
GstRtpBinPrivate
</SECTION>
<SECTION>
-<FILE>element-gstrtpjitterbuffer</FILE>
-<TITLE>gstrtpjitterbuffer</TITLE>
+<FILE>element-rtpjitterbuffer</FILE>
+<TITLE>rtpjitterbuffer</TITLE>
GstRtpJitterBuffer
<SUBSECTION Standard>
GstRtpJitterBufferClass
</SECTION>
<SECTION>
-<FILE>element-gstrtpptdemux</FILE>
-<TITLE>gstrtpptdemux</TITLE>
+<FILE>element-rtpptdemux</FILE>
+<TITLE>rtpptdemux</TITLE>
GstRtpPtDemux
<SUBSECTION Standard>
GstRtpPtDemuxClass
</SECTION>
<SECTION>
-<FILE>element-gstrtpsession</FILE>
-<TITLE>gstrtpsession</TITLE>
+<FILE>element-rtpsession</FILE>
+<TITLE>rtpsession</TITLE>
GstRtpSession
<SUBSECTION Standard>
GstRtpSessionClass
</SECTION>
<SECTION>
-<FILE>element-gstrtpssrcdemux</FILE>
-<TITLE>gstrtpssrcdemux</TITLE>
+<FILE>element-rtpssrcdemux</FILE>
+<TITLE>rtpssrcdemux</TITLE>
GstRtpSsrcDemux
<SUBSECTION Standard>
GstRtpSsrcDemuxClass
+++ /dev/null
-<plugin>
- <name>esdsink</name>
- <description>ESD Element Plugins</description>
- <filename>../../ext/esd/.libs/libgstesd.so</filename>
- <basename>libgstesd.so</basename>
- <version>0.10.30.1</version>
- <license>LGPL</license>
- <source>gst-plugins-good</source>
- <package>GStreamer Good Plug-ins git</package>
- <origin>Unknown package origin</origin>
- <elements>
- <element>
- <name>esdsink</name>
- <longname>Esound audio sink</longname>
- <class>Sink/Audio</class>
- <description>Plays audio to an esound server</description>
- <author>Arwed von Merkatz <v.merkatz@gmx.net></author>
- <pads>
- <caps>
- <name>sink</name>
- <direction>sink</direction>
- <presence>always</presence>
- <details>audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 2 ]; audio/x-raw-int, signed=(boolean){ true, false }, width=(int)8, depth=(int)8, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 2 ]</details>
- </caps>
- </pads>
- </element>
- </elements>
-</plugin>
\ No newline at end of file
+++ /dev/null
-<plugin>
- <name>gconfelements</name>
- <description>elements wrapping the GStreamer/GConf audio/video output settings</description>
- <filename>../../ext/gconf/.libs/libgstgconfelements.so</filename>
- <basename>libgstgconfelements.so</basename>
- <version>0.10.30.1</version>
- <license>LGPL</license>
- <source>gst-plugins-good</source>
- <package>GStreamer Good Plug-ins git</package>
- <origin>Unknown package origin</origin>
- <elements>
- <element>
- <name>gconfaudiosink</name>
- <longname>GConf audio sink</longname>
- <class>Sink/Audio</class>
- <description>Audio sink embedding the GConf-settings for audio output</description>
- <author>Jan Schmidt <thaytan@mad.scientist.com></author>
- <pads>
- </pads>
- </element>
- <element>
- <name>gconfaudiosrc</name>
- <longname>GConf audio source</longname>
- <class>Source/Audio</class>
- <description>Audio source embedding the GConf-settings for audio input</description>
- <author>GStreamer maintainers <gstreamer-devel@lists.sourceforge.net></author>
- <pads>
- </pads>
- </element>
- <element>
- <name>gconfvideosink</name>
- <longname>GConf video sink</longname>
- <class>Sink/Video</class>
- <description>Video sink embedding the GConf-settings for video output</description>
- <author>GStreamer maintainers <gstreamer-devel@lists.sourceforge.net></author>
- <pads>
- </pads>
- </element>
- <element>
- <name>gconfvideosrc</name>
- <longname>GConf video source</longname>
- <class>Source/Video</class>
- <description>Video source embedding the GConf-settings for video input</description>
- <author>GStreamer maintainers <gstreamer-devel@lists.sourceforge.net></author>
- <pads>
- </pads>
- </element>
- </elements>
-</plugin>
\ No newline at end of file
<origin>Unknown package origin</origin>
<elements>
<element>
- <name>pulseaudiosink</name>
- <longname>Bin wrapping pulsesink</longname>
- <class>Sink/Audio/Bin</class>
- <description>Correctly handles sink changes when streaming compressed formats to pulsesink</description>
- <author>Arun Raghavan <arun.raghavan@collabora.co.uk></author>
- <pads>
- <caps>
- <name>sink</name>
- <direction>sink</direction>
- <presence>always</presence>
- <details>audio/x-raw-int, endianness=(int){ 1234, 4321 }, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 32 ]; audio/x-raw-float, endianness=(int){ 1234, 4321 }, width=(int)32, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 32 ]; audio/x-raw-int, endianness=(int){ 1234, 4321 }, signed=(boolean)true, width=(int)32, depth=(int)32, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 32 ]; audio/x-raw-int, signed=(boolean)false, width=(int)8, depth=(int)8, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 32 ]; audio/x-alaw, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 32 ]; audio/x-mulaw, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 32 ]; audio/x-raw-int, endianness=(int){ 1234, 4321 }, signed=(boolean)true, width=(int)24, depth=(int)24, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 32 ]; audio/x-raw-int, endianness=(int){ 1234, 4321 }, signed=(boolean)true, width=(int)32, depth=(int)24, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 32 ]; audio/x-ac3, framed=(boolean)true; audio/x-eac3, framed=(boolean)true; audio/x-dts, framed=(boolean)true, block-size=(int){ 512, 1024, 2048 }; audio/mpeg, mpegversion=(int)1, mpegaudioversion=(int)[ 1, 2 ], parsed=(boolean)true</details>
- </caps>
- </pads>
- </element>
- <element>
<name>pulsemixer</name>
<longname>PulseAudio Mixer</longname>
<class>Generic/Audio</class>
</pads>
</element>
</elements>
-</plugin>
\ No newline at end of file
+</plugin>
<plugin>
- <name>gstrtpmanager</name>
+ <name>rtpmanager</name>
<description>RTP session management plugin library</description>
<filename>../../gst/rtpmanager/.libs/libgstrtpmanager.so</filename>
<basename>libgstrtpmanager.so</basename>
<origin>Unknown package origin</origin>
<elements>
<element>
- <name>gstrtpbin</name>
+ <name>rtpbin</name>
<longname>RTP Bin</longname>
<class>Filter/Network/RTP</class>
<description>Real-Time Transport Protocol bin</description>
</pads>
</element>
<element>
- <name>gstrtpjitterbuffer</name>
+ <name>rtpjitterbuffer</name>
<longname>RTP packet jitter-buffer</longname>
<class>Filter/Network/RTP</class>
<description>A buffer that deals with network jitter and other transmission faults</description>
</pads>
</element>
<element>
- <name>gstrtpptdemux</name>
+ <name>rtpptdemux</name>
<longname>RTP Demux</longname>
<class>Demux/Network/RTP</class>
<description>Parses codec streams transmitted in the same RTP session</description>
</pads>
</element>
<element>
- <name>gstrtpsession</name>
+ <name>rtpsession</name>
<longname>RTP Session</longname>
<class>Filter/Network/RTP</class>
<description>Implement an RTP session</description>
</pads>
</element>
<element>
- <name>gstrtpssrcdemux</name>
+ <name>rtpssrcdemux</name>
<longname>RTP SSRC Demux</longname>
<class>Demux/Network/RTP</class>
<description>Splits RTP streams based on the SSRC</description>
</pads>
</element>
</elements>
-</plugin>
\ No newline at end of file
+</plugin>
CAIRO_DIR =
endif
-if USE_ESD
-ESD_DIR = esd
-else
-ESD_DIR =
-endif
-
if USE_FLAC
FLAC_DIR = flac
else
FLAC_DIR =
endif
-if USE_GCONF
-GCONF_DIR = gconf
-else
-GCONF_DIR =
-endif
-
if USE_GDK_PIXBUF
GDK_PIXBUF_DIR = gdk_pixbuf
else
GDK_PIXBUF_DIR =
endif
-if USE_HAL
-HAL_DIR = hal
-else
-HAL_DIR =
-endif
-
if USE_JACK
JACK_DIR=jack
else
$(ANNODEX_DIR) \
$(CAIRO_DIR) \
$(DV1394_DIR) \
- $(ESD_DIR) \
$(FLAC_DIR) \
- $(GCONF_DIR) \
$(GDK_PIXBUF_DIR) \
- $(HAL_DIR) \
$(JACK_DIR) \
$(JPEG_DIR) \
$(LIBCACA_DIR) \
annodex \
cairo \
dv \
- esd \
flac \
- gconf \
gdk_pixbuf \
- hal \
jack \
jpeg \
libcaca \
plugin_LTLIBRARIES = libgstaasink.la
libgstaasink_la_SOURCES = gstaasink.c
-libgstaasink_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(AALIB_CFLAGS)
-libgstaasink_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(AALIB_LIBS)
+libgstaasink_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(AALIB_CFLAGS)
+libgstaasink_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(GST_LIBS) $(AALIB_LIBS)
libgstaasink_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstaasink_la_LIBTOOLFLAGS = --tag=disable-static
#include <sys/time.h>
#include "gstaasink.h"
-#include <gst/video/video.h>
/* aasink signals and args */
enum
{
- SIGNAL_FRAME_DISPLAYED,
- SIGNAL_HAVE_SIZE,
LAST_SIGNAL
};
enum
{
- ARG_0,
- ARG_WIDTH,
- ARG_HEIGHT,
- ARG_DRIVER,
- ARG_DITHER,
- ARG_BRIGHTNESS,
- ARG_CONTRAST,
- ARG_GAMMA,
- ARG_INVERSION,
- ARG_RANDOMVAL,
- ARG_FRAMES_DISPLAYED,
- ARG_FRAME_TIME
+ PROP_0,
+ PROP_WIDTH,
+ PROP_HEIGHT,
+ PROP_DRIVER,
+ PROP_DITHER,
+ PROP_BRIGHTNESS,
+ PROP_CONTRAST,
+ PROP_GAMMA,
+ PROP_INVERSION,
+ PROP_RANDOMVAL,
+ PROP_FRAMES_DISPLAYED,
+ PROP_FRAME_TIME
};
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
);
-static void gst_aasink_base_init (gpointer g_class);
-static void gst_aasink_class_init (GstAASinkClass * klass);
-static void gst_aasink_init (GstAASink * aasink);
-
-static gboolean gst_aasink_setcaps (GstBaseSink * pad, GstCaps * caps);
-static void gst_aasink_get_times (GstBaseSink * sink, GstBuffer * buffer,
+static void gst_aasink_fixate (GstBaseSink * bsink, GstCaps * caps);
+static gboolean gst_aasink_setcaps (GstBaseSink * bsink, GstCaps * caps);
+static void gst_aasink_get_times (GstBaseSink * bsink, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end);
static GstFlowReturn gst_aasink_render (GstBaseSink * basesink,
GstBuffer * buffer);
static GstStateChangeReturn gst_aasink_change_state (GstElement * element,
GstStateChange transition);
-static GstElementClass *parent_class = NULL;
-static guint gst_aasink_signals[LAST_SIGNAL] = { 0 };
-
-GType
-gst_aasink_get_type (void)
-{
- static GType aasink_type = 0;
-
- if (!aasink_type) {
- static const GTypeInfo aasink_info = {
- sizeof (GstAASinkClass),
- gst_aasink_base_init,
- NULL,
- (GClassInitFunc) gst_aasink_class_init,
- NULL,
- NULL,
- sizeof (GstAASink),
- 0,
- (GInstanceInitFunc) gst_aasink_init,
- };
-
- aasink_type =
- g_type_register_static (GST_TYPE_BASE_SINK, "GstAASink", &aasink_info,
- 0);
- }
- return aasink_type;
-}
+#define gst_aasink_parent_class parent_class
+G_DEFINE_TYPE (GstAASink, gst_aasink, GST_TYPE_BASE_SINK);
#define GST_TYPE_AADRIVERS (gst_aasink_drivers_get_type())
static GType
}
static void
-gst_aasink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_set_details_simple (element_class, "ASCII art video sink",
- "Sink/Video",
- "An ASCII art videosink", "Wim Taymans <wim.taymans@chello.be>");
-}
-
-static void
gst_aasink_class_init (GstAASinkClass * klass)
{
GObjectClass *gobject_class;
gstelement_class = (GstElementClass *) klass;
gstbasesink_class = (GstBaseSinkClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
-
gobject_class->set_property = gst_aasink_set_property;
gobject_class->get_property = gst_aasink_get_property;
/* FIXME: add long property descriptions */
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_WIDTH,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_WIDTH,
g_param_spec_int ("width", "width", "width", G_MININT, G_MAXINT, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_HEIGHT,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_HEIGHT,
g_param_spec_int ("height", "height", "height", G_MININT, G_MAXINT, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DRIVER,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DRIVER,
g_param_spec_enum ("driver", "driver", "driver", GST_TYPE_AADRIVERS, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DITHER,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DITHER,
g_param_spec_enum ("dither", "dither", "dither", GST_TYPE_AADITHER, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BRIGHTNESS,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BRIGHTNESS,
g_param_spec_int ("brightness", "brightness", "brightness", G_MININT,
G_MAXINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_CONTRAST,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_CONTRAST,
g_param_spec_int ("contrast", "contrast", "contrast", G_MININT, G_MAXINT,
0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GAMMA,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_GAMMA,
g_param_spec_float ("gamma", "gamma", "gamma", 0.0, 5.0, 1.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_INVERSION,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_INVERSION,
g_param_spec_boolean ("inversion", "inversion", "inversion", TRUE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_RANDOMVAL,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_RANDOMVAL,
g_param_spec_int ("randomval", "randomval", "randomval", G_MININT,
G_MAXINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_FRAMES_DISPLAYED,
- g_param_spec_int ("frames-displayed", "frames displayed",
- "frames displayed", G_MININT, G_MAXINT, 0,
+ g_object_class_install_property (G_OBJECT_CLASS (klass),
+ PROP_FRAMES_DISPLAYED, g_param_spec_int ("frames-displayed",
+ "frames displayed", "frames displayed", G_MININT, G_MAXINT, 0,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_FRAME_TIME,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_FRAME_TIME,
g_param_spec_int ("frame-time", "frame time", "frame time", G_MININT,
G_MAXINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- gst_aasink_signals[SIGNAL_FRAME_DISPLAYED] =
- g_signal_new ("frame-displayed", G_TYPE_FROM_CLASS (klass),
- G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstAASinkClass, frame_displayed),
- NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
- gst_aasink_signals[SIGNAL_HAVE_SIZE] =
- g_signal_new ("have-size", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET (GstAASinkClass, have_size), NULL, NULL,
- gst_marshal_VOID__INT_INT, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "ASCII art video sink", "Sink/Video", "An ASCII art videosink",
+ "Wim Taymans <wim.taymans@chello.be>");
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_aasink_change_state);
+ gstbasesink_class->fixate = GST_DEBUG_FUNCPTR (gst_aasink_fixate);
gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_aasink_setcaps);
gstbasesink_class->get_times = GST_DEBUG_FUNCPTR (gst_aasink_get_times);
gstbasesink_class->preroll = GST_DEBUG_FUNCPTR (gst_aasink_render);
}
static void
-gst_aasink_fixate (GstPad * pad, GstCaps * caps)
+gst_aasink_fixate (GstBaseSink * bsink, GstCaps * caps)
{
GstStructure *structure;
gst_structure_fixate_field_nearest_int (structure, "width", 320);
gst_structure_fixate_field_nearest_int (structure, "height", 240);
gst_structure_fixate_field_nearest_fraction (structure, "framerate", 30, 1);
+
+ GST_BASE_SINK_CLASS (parent_class)->fixate (bsink, caps);
}
static gboolean
gst_aasink_setcaps (GstBaseSink * basesink, GstCaps * caps)
{
GstAASink *aasink;
- GstStructure *structure;
+ GstVideoInfo info;
aasink = GST_AASINK (basesink);
- structure = gst_caps_get_structure (caps, 0);
- gst_structure_get_int (structure, "width", &aasink->width);
- gst_structure_get_int (structure, "height", &aasink->height);
+ if (!gst_video_info_from_caps (&info, caps))
+ goto invalid_caps;
- /* FIXME aasink->format is never set */
- g_print ("%d %d\n", aasink->width, aasink->height);
-
- GST_DEBUG ("aasink: setting %08lx (%" GST_FOURCC_FORMAT ")",
- aasink->format, GST_FOURCC_ARGS (aasink->format));
-
- g_signal_emit (G_OBJECT (aasink), gst_aasink_signals[SIGNAL_HAVE_SIZE], 0,
- aasink->width, aasink->height);
+ aasink->info = info;
return TRUE;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_DEBUG_OBJECT (aasink, "invalid caps");
+ return FALSE;
+ }
}
static void
gst_aasink_init (GstAASink * aasink)
{
- GstPad *pad;
-
- pad = GST_BASE_SINK_PAD (aasink);
- gst_pad_set_fixatecaps_function (pad, gst_aasink_fixate);
-
memcpy (&aasink->ascii_surf, &aa_defparams,
sizeof (struct aa_hardware_params));
aasink->ascii_parms.bright = 0;
aasink->ascii_parms.inversion = 0;
aasink->ascii_parms.randomval = 0;
aasink->aa_driver = 0;
-
- aasink->width = -1;
- aasink->height = -1;
-
}
static void
GstClockTime * start, GstClockTime * end)
{
*start = GST_BUFFER_TIMESTAMP (buffer);
- *end = *start + GST_BUFFER_DURATION (buffer);
+ if (GST_BUFFER_DURATION_IS_VALID (buffer))
+ *end = *start + GST_BUFFER_DURATION (buffer);
}
static GstFlowReturn
gst_aasink_render (GstBaseSink * basesink, GstBuffer * buffer)
{
GstAASink *aasink;
+ GstVideoFrame frame;
aasink = GST_AASINK (basesink);
GST_DEBUG ("render");
- gst_aasink_scale (aasink, GST_BUFFER_DATA (buffer), /* src */
+ if (!gst_video_frame_map (&frame, &aasink->info, buffer, GST_MAP_READ))
+ goto invalid_frame;
+
+ gst_aasink_scale (aasink, GST_VIDEO_FRAME_PLANE_DATA (&frame, 0), /* src */
aa_image (aasink->context), /* dest */
- aasink->width, /* sw */
- aasink->height, /* sh */
+ GST_VIDEO_INFO_WIDTH (&aasink->info), /* sw */
+ GST_VIDEO_INFO_HEIGHT (&aasink->info), /* sh */
aa_imgwidth (aasink->context), /* dw */
aa_imgheight (aasink->context)); /* dh */
0, 0, aa_imgwidth (aasink->context), aa_imgheight (aasink->context));
aa_flush (aasink->context);
aa_getevent (aasink->context, FALSE);
+ gst_video_frame_unmap (&frame);
return GST_FLOW_OK;
+
+ /* ERRORS */
+invalid_frame:
+ {
+ GST_DEBUG_OBJECT (aasink, "invalid frame");
+ return GST_FLOW_ERROR;
+ }
}
aasink = GST_AASINK (object);
switch (prop_id) {
- case ARG_WIDTH:
+ case PROP_WIDTH:
aasink->ascii_surf.width = g_value_get_int (value);
break;
- case ARG_HEIGHT:
+ case PROP_HEIGHT:
aasink->ascii_surf.height = g_value_get_int (value);
break;
- case ARG_DRIVER:{
+ case PROP_DRIVER:{
aasink->aa_driver = g_value_get_enum (value);
break;
}
- case ARG_DITHER:{
+ case PROP_DITHER:{
aasink->ascii_parms.dither = g_value_get_enum (value);
break;
}
- case ARG_BRIGHTNESS:{
+ case PROP_BRIGHTNESS:{
aasink->ascii_parms.bright = g_value_get_int (value);
break;
}
- case ARG_CONTRAST:{
+ case PROP_CONTRAST:{
aasink->ascii_parms.contrast = g_value_get_int (value);
break;
}
- case ARG_GAMMA:{
+ case PROP_GAMMA:{
aasink->ascii_parms.gamma = g_value_get_float (value);
break;
}
- case ARG_INVERSION:{
+ case PROP_INVERSION:{
aasink->ascii_parms.inversion = g_value_get_boolean (value);
break;
}
- case ARG_RANDOMVAL:{
+ case PROP_RANDOMVAL:{
aasink->ascii_parms.randomval = g_value_get_int (value);
break;
}
aasink = GST_AASINK (object);
switch (prop_id) {
- case ARG_WIDTH:{
+ case PROP_WIDTH:{
g_value_set_int (value, aasink->ascii_surf.width);
break;
}
- case ARG_HEIGHT:{
+ case PROP_HEIGHT:{
g_value_set_int (value, aasink->ascii_surf.height);
break;
}
- case ARG_DRIVER:{
+ case PROP_DRIVER:{
g_value_set_enum (value, aasink->aa_driver);
break;
}
- case ARG_DITHER:{
+ case PROP_DITHER:{
g_value_set_enum (value, aasink->ascii_parms.dither);
break;
}
- case ARG_BRIGHTNESS:{
+ case PROP_BRIGHTNESS:{
g_value_set_int (value, aasink->ascii_parms.bright);
break;
}
- case ARG_CONTRAST:{
+ case PROP_CONTRAST:{
g_value_set_int (value, aasink->ascii_parms.contrast);
break;
}
- case ARG_GAMMA:{
+ case PROP_GAMMA:{
g_value_set_float (value, aasink->ascii_parms.gamma);
break;
}
- case ARG_INVERSION:{
+ case PROP_INVERSION:{
g_value_set_boolean (value, aasink->ascii_parms.inversion);
break;
}
- case ARG_RANDOMVAL:{
+ case PROP_RANDOMVAL:{
g_value_set_int (value, aasink->ascii_parms.randomval);
break;
}
- case ARG_FRAMES_DISPLAYED:{
+ case PROP_FRAMES_DISPLAYED:{
g_value_set_int (value, aasink->frames_displayed);
break;
}
- case ARG_FRAME_TIME:{
+ case PROP_FRAME_TIME:{
g_value_set_int (value, aasink->frame_time / 1000000);
break;
}
#include <gst/gst.h>
#include <gst/base/gstbasesink.h>
+#include <gst/video/video.h>
#include <aalib.h>
struct _GstAASink {
GstBaseSink parent;
- gulong format;
- gint width, height;
+ GstVideoInfo info;
gint frames_displayed;
guint64 frame_time;
struct _GstAASinkClass {
GstBaseSinkClass parent_class;
-
- /* signals */
- void (*frame_displayed) (GstElement *element);
- void (*have_size) (GstElement *element, guint width, guint height);
};
GType gst_aasink_get_type(void);
plugin_LTLIBRARIES = libgstannodex.la
+# FIXME 0.11: ignore GValueArray warnings for now until this is sorted
+ERROR_CFLAGS=
+
libgstannodex_la_SOURCES = \
gstannodex.c \
gstcmmlutils.c \
#endif
#include <math.h>
+#include <string.h>
+
#include <gst/tag/tag.h>
#include "gstannodex.h"
#include "gstcmmlparser.h"
);
/* GstCmmlDec */
-GST_BOILERPLATE (GstCmmlDec, gst_cmml_dec, GstElement, GST_TYPE_ELEMENT);
+#define gst_cmml_dec_parent_class parent_class
+G_DEFINE_TYPE (GstCmmlDec, gst_cmml_dec, GST_TYPE_ELEMENT);
+
static void gst_cmml_dec_get_property (GObject * dec, guint property_id,
GValue * value, GParamSpec * pspec);
static void gst_cmml_dec_set_property (GObject * dec, guint property_id,
const GValue * value, GParamSpec * pspec);
-static const GstQueryType *gst_cmml_dec_query_types (GstPad * pad);
-static gboolean gst_cmml_dec_sink_query (GstPad * pad, GstQuery * query);
-static gboolean gst_cmml_dec_sink_event (GstPad * pad, GstEvent * event);
+static gboolean gst_cmml_dec_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_cmml_dec_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static gboolean gst_cmml_dec_convert (GstPad * pad, GstFormat src_fmt,
gint64 src_val, GstFormat * dest_fmt, gint64 * dest_val);
static GstStateChangeReturn gst_cmml_dec_change_state (GstElement * element,
GstStateChange transition);
-static GstFlowReturn gst_cmml_dec_chain (GstPad * pad, GstBuffer * buffer);
+static GstFlowReturn gst_cmml_dec_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
static GstCmmlPacketType gst_cmml_dec_parse_packet_type (GstCmmlDec * dec,
- GstBuffer * buffer);
-static void gst_cmml_dec_parse_ident_header (GstCmmlDec * dec, GstBuffer * buf);
-static void gst_cmml_dec_parse_first_header (GstCmmlDec * dec, GstBuffer * buf);
-static void gst_cmml_dec_parse_preamble (GstCmmlDec * dec,
- guchar * preamble, guchar * cmml_root_element);
-static void gst_cmml_dec_parse_xml (GstCmmlDec * dec,
- guchar * data, guint size);
+ gchar * data, gsize size);
+static void gst_cmml_dec_parse_ident_header (GstCmmlDec * dec, guint8 * data,
+ gsize size);
+static void gst_cmml_dec_parse_first_header (GstCmmlDec * dec, guint8 * data,
+ gsize size);
+static void gst_cmml_dec_parse_preamble (GstCmmlDec * dec, guchar * preamble,
+ guchar * cmml_root_element);
+static void gst_cmml_dec_parse_xml (GstCmmlDec * dec, guchar * data,
+ guint size);
static void gst_cmml_dec_parse_head (GstCmmlDec * dec, GstCmmlTagHead * head);
static void gst_cmml_dec_parse_clip (GstCmmlDec * dec, GstCmmlTagClip * clip);
static void gst_cmml_dec_finalize (GObject * object);
static void
-gst_cmml_dec_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_cmml_dec_sink_factory);
- gst_element_class_add_static_pad_template (element_class,
- &gst_cmml_dec_src_factory);
- gst_element_class_set_details_simple (element_class, "CMML stream decoder",
- "Codec/Decoder",
- "Decodes CMML streams", "Alessandro Decina <alessandro@nnva.org>");
-}
-
-static void
-gst_cmml_dec_class_init (GstCmmlDecClass * dec_class)
+gst_cmml_dec_class_init (GstCmmlDecClass * klass)
{
- GObjectClass *klass = G_OBJECT_CLASS (dec_class);
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GST_ELEMENT_CLASS (klass)->change_state = gst_cmml_dec_change_state;
+ object_class->set_property = gst_cmml_dec_set_property;
+ object_class->get_property = gst_cmml_dec_get_property;
+ object_class->finalize = gst_cmml_dec_finalize;
- klass->set_property = gst_cmml_dec_set_property;
- klass->get_property = gst_cmml_dec_get_property;
- klass->finalize = gst_cmml_dec_finalize;
-
- g_object_class_install_property (klass, GST_CMML_DEC_WAIT_CLIP_END,
+ g_object_class_install_property (object_class, GST_CMML_DEC_WAIT_CLIP_END,
g_param_spec_boolean ("wait-clip-end-time",
"Wait clip end time",
"Send a tag for a clip when the clip ends, setting its end-time. "
"Use when you need to know both clip's start-time and end-time.",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ element_class->change_state = gst_cmml_dec_change_state;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_cmml_dec_sink_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_cmml_dec_src_factory));
+ gst_element_class_set_details_simple (element_class, "CMML stream decoder",
+ "Codec/Decoder",
+ "Decodes CMML streams", "Alessandro Decina <alessandro@nnva.org>");
}
static void
-gst_cmml_dec_init (GstCmmlDec * dec, GstCmmlDecClass * klass)
+gst_cmml_dec_init (GstCmmlDec * dec)
{
dec->sinkpad =
gst_pad_new_from_static_template (&gst_cmml_dec_sink_factory, "sink");
gst_pad_set_chain_function (dec->sinkpad, gst_cmml_dec_chain);
- gst_pad_set_query_type_function (dec->sinkpad, gst_cmml_dec_query_types);
gst_pad_set_query_function (dec->sinkpad, gst_cmml_dec_sink_query);
gst_pad_set_event_function (dec->sinkpad, gst_cmml_dec_sink_event);
gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
break;
}
- res = parent_class->change_state (element, transition);
+ res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
return res;
}
-static const GstQueryType *
-gst_cmml_dec_query_types (GstPad * pad)
-{
- static const GstQueryType query_types[] = {
- GST_QUERY_CONVERT,
- 0
- };
-
- return query_types;
-}
-
static gboolean
-gst_cmml_dec_sink_query (GstPad * pad, GstQuery * query)
+gst_cmml_dec_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
gboolean res = FALSE;
}
static gboolean
-gst_cmml_dec_sink_event (GstPad * pad, GstEvent * event)
+gst_cmml_dec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
- GstCmmlDec *dec = GST_CMML_DEC (GST_PAD_PARENT (pad));
+ GstCmmlDec *dec = GST_CMML_DEC (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
break;
}
- return gst_pad_event_default (pad, event);
+ return gst_pad_event_default (pad, parent, event);
}
static GstFlowReturn
-gst_cmml_dec_chain (GstPad * pad, GstBuffer * buffer)
+gst_cmml_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
- GstCmmlDec *dec = GST_CMML_DEC (GST_PAD_PARENT (pad));
+ GstCmmlDec *dec = GST_CMML_DEC (parent);
GstCmmlPacketType packet;
+ GstMapInfo map;
+ guint8 *data;
+ gsize size;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
- if (GST_BUFFER_SIZE (buffer) == 0) {
+ if (size == 0) {
/* the EOS page could be empty */
dec->flow_return = GST_FLOW_OK;
goto done;
dec->granulerate_n, dec->granulerate_d, dec->granuleshift);
/* identify the packet type */
- packet = gst_cmml_dec_parse_packet_type (dec, buffer);
+ packet = gst_cmml_dec_parse_packet_type (dec, (gchar *) data, size);
/* handle the packet. the handler will set dec->flow_return */
switch (packet) {
case GST_CMML_PACKET_IDENT_HEADER:
if (dec->sent_root == FALSE)
/* don't parse the ident again in case of seeking to the beginning */
- gst_cmml_dec_parse_ident_header (dec, buffer);
+ gst_cmml_dec_parse_ident_header (dec, data, size);
break;
case GST_CMML_PACKET_FIRST_HEADER:
if (dec->sent_root == FALSE)
/* don't parse the xml preamble if it has already been parsed because it
* would error out, so seeking to the beginning would fail */
- gst_cmml_dec_parse_first_header (dec, buffer);
+ gst_cmml_dec_parse_first_header (dec, data, size);
break;
case GST_CMML_PACKET_SECOND_HEADER:
case GST_CMML_PACKET_CLIP:
- gst_cmml_dec_parse_xml (dec,
- GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
+ gst_cmml_dec_parse_xml (dec, data, size);
break;
case GST_CMML_PACKET_UNKNOWN:
default:
}
done:
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
+
return dec->flow_return;
}
/* finds the packet type of the buffer
*/
static GstCmmlPacketType
-gst_cmml_dec_parse_packet_type (GstCmmlDec * dec, GstBuffer * buffer)
+gst_cmml_dec_parse_packet_type (GstCmmlDec * dec, gchar * data, gsize size)
{
GstCmmlPacketType packet_type = GST_CMML_PACKET_UNKNOWN;
- gchar *data = (gchar *) GST_BUFFER_DATA (buffer);
- guint size = GST_BUFFER_SIZE (buffer);
if (size >= 8 && !memcmp (data, "CMML\0\0\0\0", 8)) {
packet_type = GST_CMML_PACKET_IDENT_HEADER;
{
GstFlowReturn res;
- res = gst_pad_alloc_buffer (dec->srcpad, GST_BUFFER_OFFSET_NONE,
- size, gst_static_pad_template_get_caps (&gst_cmml_dec_src_factory),
- buffer);
-
- if (res == GST_FLOW_OK) {
+ *buffer = gst_buffer_new_allocate (NULL, size, 0);
+ if (*buffer != NULL) {
if (data)
- memcpy (GST_BUFFER_DATA (*buffer), data, size);
+ gst_buffer_fill (*buffer, 0, data, size);
GST_BUFFER_TIMESTAMP (*buffer) = dec->timestamp;
- } else if (res == GST_FLOW_NOT_LINKED) {
- GST_DEBUG_OBJECT (dec, "alloc function return NOT-LINKED, ignoring");
+ res = GST_FLOW_OK;
} else {
- GST_WARNING_OBJECT (dec, "alloc function returned error %s",
- gst_flow_get_name (res));
+ GST_WARNING_OBJECT (dec, "could not allocate buffer");
+ res = GST_FLOW_ERROR;
}
return res;
/* parses the first CMML packet (the ident header)
*/
static void
-gst_cmml_dec_parse_ident_header (GstCmmlDec * dec, GstBuffer * buffer)
+gst_cmml_dec_parse_ident_header (GstCmmlDec * dec, guint8 * data, gsize size)
{
- guint8 *data = GST_BUFFER_DATA (buffer);
-
/* the ident header has a fixed length */
- if (GST_BUFFER_SIZE (buffer) != CMML_IDENT_HEADER_SIZE) {
+ if (size != CMML_IDENT_HEADER_SIZE) {
GST_ELEMENT_ERROR (dec, STREAM, DECODE,
- (NULL), ("wrong ident header size: %d", GST_BUFFER_SIZE (buffer)));
+ (NULL), ("wrong ident header size: %" G_GSIZE_FORMAT, size));
dec->flow_return = GST_FLOW_ERROR;
return;
* optional "cmml" processing instruction.
*/
static void
-gst_cmml_dec_parse_first_header (GstCmmlDec * dec, GstBuffer * buffer)
+gst_cmml_dec_parse_first_header (GstCmmlDec * dec, guint8 * data, gsize size)
{
- gst_cmml_dec_parse_xml (dec,
- GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
+ gst_cmml_dec_parse_xml (dec, data, size);
/* if there is a processing instruction, gst_cmml_dec_parse_preamble
* will be triggered. Otherwise we need to call it manually.
*/
if (dec->flow_return == GST_FLOW_OK && !dec->sent_root) {
- guchar *preamble = (guchar *) g_strndup ((gchar *) GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ guchar *preamble = (guchar *) g_strndup ((gchar *) data, size);
gst_cmml_dec_parse_preamble (dec, preamble, (guchar *) "<cmml>");
g_free (preamble);
GST_DEBUG_OBJECT (dec, "found CMML head (title: %s base: %s)",
head->title, head->base);
+ /* FIXME: what's the point of all this GValue transform crack? */
/* create the GST_TAG_TITLE tag */
g_value_init (&str_val, G_TYPE_STRING);
g_value_init (&title_val, gst_tag_get_type (GST_TAG_TITLE));
g_value_set_string (&str_val, (gchar *) head->title);
g_value_transform (&str_val, &title_val);
- tags = gst_tag_list_new ();
+ tags = gst_tag_list_new_empty ();
gst_tag_list_add_values (tags, GST_TAG_MERGE_APPEND,
GST_TAG_TITLE, &title_val, NULL);
gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_CMML_HEAD, head, NULL);
- gst_element_found_tags_for_pad (GST_ELEMENT (dec), dec->srcpad, tags);
+ gst_pad_push_event (dec->srcpad, gst_event_new_tag (tags));
g_value_unset (&str_val);
g_value_unset (&title_val);
GST_DEBUG_OBJECT (dec, "sending clip tag %s", clip->id);
- tags = gst_tag_list_new ();
- gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_CMML_CLIP, clip, NULL);
- gst_element_found_tags_for_pad (GST_ELEMENT (dec), dec->srcpad, tags);
+ tags = gst_tag_list_new (GST_TAG_CMML_CLIP, clip, NULL);
+ gst_pad_push_event (dec->srcpad, gst_event_new_tag (tags));
}
/* push the string representation of a clip */
#include <gst/gst.h>
#include <gst/gstformat.h>
-#include <gst/controller/gstcontroller.h>
#include "gstcmmlparser.h"
GST_STATIC_CAPS ("text/x-cmml, encoded = (boolean) false")
);
-GST_BOILERPLATE (GstCmmlEnc, gst_cmml_enc, GstElement, GST_TYPE_ELEMENT);
+#define gst_cmml_enc_parent_class parent_class
+G_DEFINE_TYPE (GstCmmlEnc, gst_cmml_enc, GST_TYPE_ELEMENT);
+
static void gst_cmml_enc_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec);
static void gst_cmml_enc_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec);
-static gboolean gst_cmml_enc_sink_event (GstPad * pad, GstEvent * event);
+static gboolean gst_cmml_enc_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static GstStateChangeReturn gst_cmml_enc_change_state (GstElement * element,
GstStateChange transition);
-static GstFlowReturn gst_cmml_enc_chain (GstPad * pad, GstBuffer * buffer);
-static void gst_cmml_enc_parse_preamble (GstCmmlEnc * enc,
- guchar * preamble, guchar * processing_instruction);
+static GstFlowReturn gst_cmml_enc_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static void gst_cmml_enc_parse_preamble (GstCmmlEnc * enc, guchar * preamble,
+ guchar * processing_instruction);
static void gst_cmml_enc_parse_end_tag (GstCmmlEnc * enc);
static void gst_cmml_enc_parse_tag_head (GstCmmlEnc * enc,
GstCmmlTagHead * head);
static void gst_cmml_enc_finalize (GObject * object);
static void
-gst_cmml_enc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_cmml_enc_sink_factory);
- gst_element_class_add_static_pad_template (element_class,
- &gst_cmml_enc_src_factory);
- gst_element_class_set_details_simple (element_class, "CMML streams encoder",
- "Codec/Encoder",
- "Encodes CMML streams", "Alessandro Decina <alessandro@nnva.org>");
-}
-
-static void
gst_cmml_enc_class_init (GstCmmlEncClass * enc_class)
{
GObjectClass *klass = G_OBJECT_CLASS (enc_class);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (enc_class);
klass->get_property = gst_cmml_enc_get_property;
klass->set_property = gst_cmml_enc_set_property;
0, 64, 32,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
- GST_ELEMENT_CLASS (klass)->change_state = gst_cmml_enc_change_state;
+ element_class->change_state = gst_cmml_enc_change_state;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_cmml_enc_sink_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_cmml_enc_src_factory));
+ gst_element_class_set_details_simple (element_class, "CMML streams encoder",
+ "Codec/Encoder",
+ "Encodes CMML streams", "Alessandro Decina <alessandro@nnva.org>");
}
static void
-gst_cmml_enc_init (GstCmmlEnc * enc, GstCmmlEncClass * klass)
+gst_cmml_enc_init (GstCmmlEnc * enc)
{
enc->sinkpad =
gst_pad_new_from_static_template (&gst_cmml_enc_sink_factory, "sink");
break;
}
- res = parent_class->change_state (element, transition);
+ res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
}
static gboolean
-gst_cmml_enc_sink_event (GstPad * pad, GstEvent * event)
+gst_cmml_enc_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
- GstCmmlEnc *enc = GST_CMML_ENC (GST_PAD_PARENT (pad));
+ GstCmmlEnc *enc = GST_CMML_ENC (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
break;
}
- return gst_pad_event_default (pad, event);
+ return gst_pad_event_default (pad, parent, event);
}
static GstFlowReturn
{
GstFlowReturn res;
- res = gst_pad_alloc_buffer (enc->srcpad, GST_BUFFER_OFFSET_NONE, size,
- NULL, buffer);
- if (res == GST_FLOW_OK) {
+ *buffer = gst_buffer_new_allocate (NULL, size, 0);
+ if (*buffer != NULL) {
if (data)
- memcpy (GST_BUFFER_DATA (*buffer), data, size);
+ gst_buffer_fill (*buffer, 0, data, size);
+ res = GST_FLOW_OK;
} else {
- GST_WARNING_OBJECT (enc, "alloc function returned error %s",
- gst_flow_get_name (res));
+ GST_WARNING_OBJECT (enc, "could not allocate buffer");
+ res = GST_FLOW_ERROR;
}
return res;
gst_value_array_append_value (&array, &value);
gst_buffer_unref (buffer);
- GST_BUFFER_FLAG_SET (ident, GST_BUFFER_FLAG_IN_CAPS);
- GST_BUFFER_FLAG_SET (preamble, GST_BUFFER_FLAG_IN_CAPS);
- GST_BUFFER_FLAG_SET (head, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (ident, GST_BUFFER_FLAG_HEADER);
+ GST_BUFFER_FLAG_SET (preamble, GST_BUFFER_FLAG_HEADER);
+ GST_BUFFER_FLAG_SET (head, GST_BUFFER_FLAG_HEADER);
gst_structure_set_value (structure, "streamheader", &array);
goto alloc_error;
headers = g_list_append (headers, head_buf);
- caps = gst_pad_get_caps (enc->srcpad);
+ caps = gst_pad_query_caps (enc->srcpad, NULL);
caps = gst_cmml_enc_set_header_on_caps (enc, caps,
ident_buf, preamble_buf, head_buf);
+ gst_pad_set_caps (enc->srcpad, caps);
+ gst_caps_unref (caps);
while (headers) {
buffer = GST_BUFFER (headers->data);
/* set granulepos 0 on headers */
GST_BUFFER_OFFSET_END (buffer) = 0;
- gst_buffer_set_caps (buffer, caps);
enc->flow_return = gst_cmml_enc_push (enc, buffer);
headers = g_list_delete_link (headers, headers);
goto push_error;
}
- gst_caps_unref (caps);
-
enc->sent_headers = TRUE;
return;
enc->flow_return = GST_FLOW_ERROR;
return;
push_error:
- gst_caps_unref (caps);
/* fallthrough */
alloc_error:
for (walk = headers; walk; walk = walk->next)
}
static GstFlowReturn
-gst_cmml_enc_chain (GstPad * pad, GstBuffer * buffer)
+gst_cmml_enc_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GError *err = NULL;
- GstCmmlEnc *enc = GST_CMML_ENC (GST_PAD_PARENT (pad));
+ GstCmmlEnc *enc = GST_CMML_ENC (parent);
+ GstMapInfo map;
/* the CMML handlers registered with enc->parser will override this when
* encoding/pushing the buffers downstream
*/
enc->flow_return = GST_FLOW_OK;
- if (!gst_cmml_parser_parse_chunk (enc->parser,
- (gchar *) GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), &err)) {
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ if (!gst_cmml_parser_parse_chunk (enc->parser, (gchar *) map.data, map.size,
+ &err)) {
GST_ELEMENT_ERROR (enc, STREAM, ENCODE, (NULL), ("%s", err->message));
g_error_free (err);
enc->flow_return = GST_FLOW_ERROR;
}
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return enc->flow_return;
}
"Render overlay on a video stream using Cairo",
"Jon Nordby <jononor@gmail.com>");
- gst_element_class_add_static_pad_template (element_class,
- &gst_cairo_overlay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_cairo_overlay_src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_cairo_overlay_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_cairo_overlay_src_template));
}
static void
gst_element_class_set_details_simple (ec, "Cairo encoder",
"Codec/Encoder", "Encodes streams using Cairo",
"Lutz Mueller <lutz@topfrose.de>");
- gst_element_class_add_static_pad_template (ec, &t_snk);
- gst_element_class_add_static_pad_template (ec, &t_src);
+ gst_element_class_add_pad_template (ec, gst_static_pad_template_get (&t_snk));
+ gst_element_class_add_pad_template (ec, gst_static_pad_template_get (&t_src));
}
static void
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class,
- &cairo_text_overlay_src_template_factory);
- gst_element_class_add_static_pad_template (element_class,
- &video_sink_template_factory);
- gst_element_class_add_static_pad_template (element_class,
- &text_sink_template_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&cairo_text_overlay_src_template_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&video_sink_template_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&text_sink_template_factory));
gst_element_class_set_details_simple (element_class, "Text overlay",
"Filter/Editor/Video",
overlay->text_collect_data);
}
gst_pad_push_event (overlay->srcpad, gst_event_new_eos ());
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto done;
}
"Filter/Editor/Video",
"Overlays the time on a video stream", "David Schleef <ds@schleef.org>");
- gst_element_class_add_static_pad_template (element_class,
- &gst_cairo_time_overlay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_cairo_time_overlay_src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_cairo_time_overlay_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_cairo_time_overlay_src_template));
}
static void
libgstdv_la_SOURCES = gstdv.c gstdvdec.c gstdvdemux.c gstsmptetimecode.c
libgstdv_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(LIBDV_CFLAGS)
libgstdv_la_LIBADD = \
- $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) \
- $(GST_BASE_LIBS) $(GST_LIBS) $(LIBDV_LIBS)
+ $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) -lgstvideo-$(GST_MAJORMINOR) \
+ $(GST_BASE_LIBS) $(GST_LIBS) $(LIBDV_LIBS)
libgstdv_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstdv_la_LIBTOOLFLAGS = --tag=disable-static
#include <string.h>
#include <math.h>
#include <gst/video/video.h>
+#include <gst/video/gstvideopool.h>
#include "gstdvdec.h"
static GstStaticPadTemplate src_temp = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("video/x-raw-yuv, "
- "format = (fourcc) YUY2, "
- "width = (int) 720, "
- "framerate = (fraction) [ 1/1, 60/1 ];"
- "video/x-raw-rgb, "
- "bpp = (int) 32, "
- "depth = (int) 24, "
- "endianness = (int) " G_STRINGIFY (G_BIG_ENDIAN) ", "
- "red_mask = (int) 0x0000ff00, "
- "green_mask = (int) 0x00ff0000, "
- "blue_mask = (int) 0xff000000, "
- "width = (int) 720, "
- "framerate = (fraction) [ 1/1, 60/1 ];"
- "video/x-raw-rgb, "
- "bpp = (int) 24, "
- "depth = (int) 24, "
- "endianness = (int) " G_STRINGIFY (G_BIG_ENDIAN) ", "
- "red_mask = (int) 0x00ff0000, "
- "green_mask = (int) 0x0000ff00, "
- "blue_mask = (int) 0x000000ff, "
- "width = (int) 720, " "framerate = (fraction) [ 1/1, 60/1 ]")
+ GST_STATIC_CAPS ("video/x-raw, "
+ "format = (string) { YUY2, BGRx, RGB }, "
+ "framerate = (fraction) [ 1/1, 60/1 ], "
+ "width = (int) 720, " "height = (int) { 576, 480 }")
);
#define GST_TYPE_DVDEC_QUALITY (gst_dvdec_quality_get_type())
return qtype;
}
-GST_BOILERPLATE (GstDVDec, gst_dvdec, GstElement, GST_TYPE_ELEMENT);
+#define gst_dvdec_parent_class parent_class
+G_DEFINE_TYPE (GstDVDec, gst_dvdec, GST_TYPE_ELEMENT);
-static void gst_dvdec_finalize (GObject * object);
-static gboolean gst_dvdec_sink_setcaps (GstPad * pad, GstCaps * caps);
-static GstFlowReturn gst_dvdec_chain (GstPad * pad, GstBuffer * buffer);
-static gboolean gst_dvdec_sink_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_dvdec_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static gboolean gst_dvdec_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static GstStateChangeReturn gst_dvdec_change_state (GstElement * element,
GstStateChange transition);
GValue * value, GParamSpec * pspec);
static void
-gst_dvdec_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &sink_temp);
- gst_element_class_add_static_pad_template (element_class, &src_temp);
-
- gst_element_class_set_details_simple (element_class, "DV video decoder",
- "Codec/Decoder/Video",
- "Uses libdv to decode DV video (smpte314) (libdv.sourceforge.net)",
- "Erik Walthinsen <omega@cse.ogi.edu>," "Wim Taymans <wim@fluendo.com>");
-
- GST_DEBUG_CATEGORY_INIT (dvdec_debug, "dvdec", 0, "DV decoding element");
-}
-
-static void
gst_dvdec_class_init (GstDVDecClass * klass)
{
GObjectClass *gobject_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gobject_class->finalize = gst_dvdec_finalize;
gobject_class->set_property = gst_dvdec_set_property;
gobject_class->get_property = gst_dvdec_get_property;
1, G_MAXINT, DV_DEFAULT_DECODE_NTH,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gstelement_class->change_state = gst_dvdec_change_state;
+ gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_dvdec_change_state);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_temp));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_temp));
+
+ gst_element_class_set_details_simple (gstelement_class, "DV video decoder",
+ "Codec/Decoder/Video",
+ "Uses libdv to decode DV video (smpte314) (libdv.sourceforge.net)",
+ "Erik Walthinsen <omega@cse.ogi.edu>," "Wim Taymans <wim@fluendo.com>");
+
+ GST_DEBUG_CATEGORY_INIT (dvdec_debug, "dvdec", 0, "DV decoding element");
}
static void
-gst_dvdec_init (GstDVDec * dvdec, GstDVDecClass * g_class)
+gst_dvdec_init (GstDVDec * dvdec)
{
dvdec->sinkpad = gst_pad_new_from_static_template (&sink_temp, "sink");
- gst_pad_set_setcaps_function (dvdec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_dvdec_sink_setcaps));
gst_pad_set_chain_function (dvdec->sinkpad, gst_dvdec_chain);
gst_pad_set_event_function (dvdec->sinkpad, gst_dvdec_sink_event);
gst_element_add_pad (GST_ELEMENT (dvdec), dvdec->sinkpad);
dvdec->clamp_luma = FALSE;
dvdec->clamp_chroma = FALSE;
dvdec->quality = DV_DEFAULT_QUALITY;
- dvdec->segment = gst_segment_new ();
-}
-
-static void
-gst_dvdec_finalize (GObject * object)
-{
- GstDVDec *dvdec = GST_DVDEC (object);
-
- gst_segment_free (dvdec->segment);
-
- G_OBJECT_CLASS (parent_class)->finalize (object);
}
static gboolean
-gst_dvdec_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_dvdec_sink_setcaps (GstDVDec * dvdec, GstCaps * caps)
{
- GstDVDec *dvdec;
GstStructure *s;
const GValue *par = NULL, *rate = NULL;
- dvdec = GST_DVDEC (gst_pad_get_parent (pad));
-
/* first parse the caps */
s = gst_caps_get_structure (caps, 0);
dvdec->sink_negotiated = TRUE;
dvdec->src_negotiated = FALSE;
- gst_object_unref (dvdec);
-
return TRUE;
/* ERRORS */
no_framerate:
{
GST_DEBUG_OBJECT (dvdec, "no framerate specified in caps");
- gst_object_unref (dvdec);
return FALSE;
}
}
+static void
+gst_dvdec_negotiate_pool (GstDVDec * dec, GstCaps * caps, GstVideoInfo * info)
+{
+ GstQuery *query;
+ GstBufferPool *pool = NULL;
+ guint size, min, max, prefix, alignment;
+ GstStructure *config;
+
+ /* find a pool for the negotiated caps now */
+ query = gst_query_new_allocation (caps, TRUE);
+
+ if (gst_pad_peer_query (dec->srcpad, query)) {
+ GST_DEBUG_OBJECT (dec, "got downstream ALLOCATION hints");
+ /* we got configuration from our peer, parse them */
+ gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
+ &alignment, &pool);
+ size = MAX (size, info->size);
+ } else {
+ GST_DEBUG_OBJECT (dec, "didn't get downstream ALLOCATION hints");
+ size = info->size;
+ min = max = 0;
+ prefix = 0;
+ alignment = 0;
+ }
+
+ if (pool == NULL) {
+ /* we did not get a pool, make one ourselves then */
+ pool = gst_buffer_pool_new ();
+ }
+
+ if (dec->pool)
+ gst_object_unref (dec->pool);
+ dec->pool = pool;
+
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set (config, caps, size, min, max, prefix, alignment);
+ /* just set the option, if the pool can support it we will transparently use
+ * it through the video info API. We could also see if the pool support this
+ * option and only activate it then. */
+ gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+
+ gst_buffer_pool_set_config (pool, config);
+ /* and activate */
+ gst_buffer_pool_set_active (pool, TRUE);
+
+ gst_query_unref (query);
+}
+
static gboolean
gst_dvdec_src_negotiate (GstDVDec * dvdec)
{
/* ignoring rgb, bgr0 for now */
dvdec->bpp = 2;
- othercaps = gst_caps_new_simple ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, GST_STR_FOURCC ("YUY2"),
- "width", G_TYPE_INT, 720,
- "height", G_TYPE_INT, dvdec->height,
- "framerate", GST_TYPE_FRACTION, dvdec->framerate_numerator,
- dvdec->framerate_denominator,
- "pixel-aspect-ratio", GST_TYPE_FRACTION, dvdec->par_x,
- dvdec->par_y, "interlaced", G_TYPE_BOOLEAN, dvdec->interlaced, NULL);
+ gst_video_info_set_format (&dvdec->vinfo, GST_VIDEO_FORMAT_YUY2,
+ 720, dvdec->height);
+ dvdec->vinfo.fps_n = dvdec->framerate_numerator;
+ dvdec->vinfo.fps_d = dvdec->framerate_denominator;
+ dvdec->vinfo.par_n = dvdec->par_x;
+ dvdec->vinfo.par_d = dvdec->par_y;
+ if (dvdec->interlaced) {
+ dvdec->vinfo.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
+ dvdec->vinfo.flags |= GST_VIDEO_FLAG_INTERLACED;
+ } else {
+ dvdec->vinfo.interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
+ dvdec->vinfo.flags &= GST_VIDEO_FLAG_INTERLACED;
+ }
+ othercaps = gst_video_info_to_caps (&dvdec->vinfo);
gst_pad_set_caps (dvdec->srcpad, othercaps);
+
+ gst_dvdec_negotiate_pool (dvdec, othercaps, &dvdec->vinfo);
gst_caps_unref (othercaps);
dvdec->src_negotiated = TRUE;
}
static gboolean
-gst_dvdec_sink_event (GstPad * pad, GstEvent * event)
+gst_dvdec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstDVDec *dvdec;
gboolean res = TRUE;
- dvdec = GST_DVDEC (gst_pad_get_parent (pad));
+ dvdec = GST_DVDEC (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
- gst_segment_init (dvdec->segment, GST_FORMAT_UNDEFINED);
+ gst_segment_init (&dvdec->segment, GST_FORMAT_UNDEFINED);
break;
- case GST_EVENT_NEWSEGMENT:{
- gboolean update;
- gdouble rate, applied_rate;
- GstFormat format;
- gint64 start, stop, position;
-
- gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
- &format, &start, &stop, &position);
-
- GST_DEBUG_OBJECT (dvdec, "Got NEWSEGMENT [%" GST_TIME_FORMAT
- " - %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "]",
- GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
- GST_TIME_ARGS (position));
-
- gst_segment_set_newsegment_full (dvdec->segment, update, rate,
- applied_rate, format, start, stop, position);
+ case GST_EVENT_SEGMENT:{
+ const GstSegment *segment;
+
+ gst_event_parse_segment (event, &segment);
+
+ GST_DEBUG_OBJECT (dvdec, "Got NEWSEGMENT %" GST_SEGMENT_FORMAT, &segment);
+
+ gst_segment_copy_into (segment, &dvdec->segment);
break;
}
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ gst_dvdec_sink_setcaps (dvdec, caps);
+ gst_event_unref (event);
+ event = NULL;
+ res = TRUE;
+ break;
+ }
+
default:
break;
}
- res = gst_pad_push_event (dvdec->srcpad, event);
+ if (event)
+ res = gst_pad_push_event (dvdec->srcpad, event);
return res;
}
static GstFlowReturn
-gst_dvdec_chain (GstPad * pad, GstBuffer * buf)
+gst_dvdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstDVDec *dvdec;
guint8 *inframe;
- guint8 *outframe;
guint8 *outframe_ptrs[3];
gint outframe_pitches[3];
+ GstMapInfo map;
+ GstVideoFrame frame;
GstBuffer *outbuf;
GstFlowReturn ret = GST_FLOW_OK;
guint length;
- gint64 cstart, cstop;
+ guint64 cstart, cstop;
gboolean PAL, wide;
- dvdec = GST_DVDEC (gst_pad_get_parent (pad));
- inframe = GST_BUFFER_DATA (buf);
+ dvdec = GST_DVDEC (parent);
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ inframe = map.data;
/* buffer should be at least the size of one NTSC frame, this should
* be enough to decode the header. */
- if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < NTSC_BUFFER))
+ if (G_UNLIKELY (map.size < NTSC_BUFFER))
goto wrong_size;
/* preliminary dropping. unref and return if outside of configured segment */
- if ((dvdec->segment->format == GST_FORMAT_TIME) &&
- (!(gst_segment_clip (dvdec->segment, GST_FORMAT_TIME,
+ if ((dvdec->segment.format == GST_FORMAT_TIME) &&
+ (!(gst_segment_clip (&dvdec->segment, GST_FORMAT_TIME,
GST_BUFFER_TIMESTAMP (buf),
GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf),
&cstart, &cstop))))
/* check the buffer is of right size after we know if we are
* dealing with PAL or NTSC */
length = (PAL ? PAL_BUFFER : NTSC_BUFFER);
- if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < length))
+ if (G_UNLIKELY (map.size < length))
goto wrong_size;
dv_parse_packs (dvdec->decoder, inframe);
goto not_negotiated;
}
- ret =
- gst_pad_alloc_buffer_and_set_caps (dvdec->srcpad, 0,
- (720 * dvdec->height) * dvdec->bpp,
- GST_PAD_CAPS (dvdec->srcpad), &outbuf);
+ if (gst_pad_check_reconfigure (dvdec->srcpad)) {
+ GstCaps *caps;
+
+ caps = gst_pad_get_current_caps (dvdec->srcpad);
+ gst_dvdec_negotiate_pool (dvdec, caps, &dvdec->vinfo);
+ gst_caps_unref (caps);
+ }
+
+ ret = gst_buffer_pool_acquire_buffer (dvdec->pool, &outbuf, NULL);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto no_buffer;
- outframe = GST_BUFFER_DATA (outbuf);
+ gst_video_frame_map (&frame, &dvdec->vinfo, outbuf, GST_MAP_WRITE);
- outframe_ptrs[0] = outframe;
- outframe_pitches[0] = 720 * dvdec->bpp;
+ outframe_ptrs[0] = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
+ outframe_pitches[0] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
/* the rest only matters for YUY2 */
if (dvdec->bpp < 3) {
- outframe_ptrs[1] = outframe_ptrs[0] + 720 * dvdec->height;
- outframe_ptrs[2] = outframe_ptrs[1] + 360 * dvdec->height;
+ outframe_ptrs[1] = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
+ outframe_ptrs[2] = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
- outframe_pitches[1] = dvdec->height / 2;
- outframe_pitches[2] = outframe_pitches[1];
+ outframe_pitches[1] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
+ outframe_pitches[2] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 2);
}
GST_DEBUG_OBJECT (dvdec, "decoding and pushing buffer");
dv_decode_full_frame (dvdec->decoder, inframe,
e_dv_color_yuv, outframe_ptrs, outframe_pitches);
- GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF);
+ gst_video_frame_unmap (&frame);
+
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET (buf);
GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_END (buf);
dvdec->video_offset++;
done:
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
- gst_object_unref (dvdec);
return ret;
dv_decoder_new (0, dvdec->clamp_luma, dvdec->clamp_chroma);
dvdec->decoder->quality = qualities[dvdec->quality];
dv_set_error_log (dvdec->decoder, NULL);
- gst_segment_init (dvdec->segment, GST_FORMAT_UNDEFINED);
+ gst_video_info_init (&dvdec->vinfo);
+ gst_segment_init (&dvdec->segment, GST_FORMAT_UNDEFINED);
dvdec->src_negotiated = FALSE;
dvdec->sink_negotiated = FALSE;
/*
break;
}
- ret = parent_class->change_state (element, transition);
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
case GST_STATE_CHANGE_PAUSED_TO_READY:
dv_decoder_free (dvdec->decoder);
dvdec->decoder = NULL;
+ if (dvdec->pool) {
+ gst_buffer_pool_set_active (dvdec->pool, FALSE);
+ gst_object_unref (dvdec->pool);
+ dvdec->pool = NULL;
+ }
break;
case GST_STATE_CHANGE_READY_TO_NULL:
break;
#include <gst/gst.h>
+#include <gst/video/video.h>
+
#include <libdv/dv.h>
/* input caps */
gboolean sink_negotiated;
+ GstVideoInfo vinfo;
gint framerate_numerator;
gint framerate_denominator;
gint height;
gint video_offset;
gint drop_factor;
- GstSegment *segment;
+ GstBufferPool *pool;
+ GstSegment segment;
};
struct _GstDVDecClass {
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <string.h>
#include <math.h>
static GstStaticPadTemplate audio_src_temp = GST_STATIC_PAD_TEMPLATE ("audio",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "depth = (int) 16, "
- "width = (int) 16, "
- "signed = (boolean) TRUE, "
- "channels = (int) {2, 4}, "
- "endianness = (int) " G_STRINGIFY (G_BYTE_ORDER) ", "
- "rate = (int) { 32000, 44100, 48000 }")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "layout = (string) interleaved, "
+ "rate = (int) { 32000, 44100, 48000 }, " "channels = (int) {2, 4}")
);
-GST_BOILERPLATE (GstDVDemux, gst_dvdemux, GstElement, GST_TYPE_ELEMENT);
+#define gst_dvdemux_parent_class parent_class
+G_DEFINE_TYPE (GstDVDemux, gst_dvdemux, GST_TYPE_ELEMENT);
static void gst_dvdemux_finalize (GObject * object);
/* query functions */
-static const GstQueryType *gst_dvdemux_get_src_query_types (GstPad * pad);
-static gboolean gst_dvdemux_src_query (GstPad * pad, GstQuery * query);
-static const GstQueryType *gst_dvdemux_get_sink_query_types (GstPad * pad);
-static gboolean gst_dvdemux_sink_query (GstPad * pad, GstQuery * query);
+static gboolean gst_dvdemux_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_dvdemux_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
/* convert functions */
static gboolean gst_dvdemux_sink_convert (GstDVDemux * demux,
/* event functions */
static gboolean gst_dvdemux_send_event (GstElement * element, GstEvent * event);
-static gboolean gst_dvdemux_handle_src_event (GstPad * pad, GstEvent * event);
-static gboolean gst_dvdemux_handle_sink_event (GstPad * pad, GstEvent * event);
+static gboolean gst_dvdemux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_dvdemux_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
/* scheduling functions */
static void gst_dvdemux_loop (GstPad * pad);
static GstFlowReturn gst_dvdemux_flush (GstDVDemux * dvdemux);
-static GstFlowReturn gst_dvdemux_chain (GstPad * pad, GstBuffer * buffer);
+static GstFlowReturn gst_dvdemux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
/* state change functions */
-static gboolean gst_dvdemux_sink_activate (GstPad * sinkpad);
-static gboolean gst_dvdemux_sink_activate_push (GstPad * sinkpad,
- gboolean active);
-static gboolean gst_dvdemux_sink_activate_pull (GstPad * sinkpad,
- gboolean active);
+static gboolean gst_dvdemux_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+static gboolean gst_dvdemux_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
static GstStateChangeReturn gst_dvdemux_change_state (GstElement * element,
GstStateChange transition);
static void
-gst_dvdemux_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &sink_temp);
- gst_element_class_add_static_pad_template (element_class,
- &video_src_temp);
- gst_element_class_add_static_pad_template (element_class,
- &audio_src_temp);
-
- gst_element_class_set_details_simple (element_class,
- "DV system stream demuxer", "Codec/Demuxer",
- "Uses libdv to separate DV audio from DV video (libdv.sourceforge.net)",
- "Erik Walthinsen <omega@cse.ogi.edu>, Wim Taymans <wim@fluendo.com>");
-
- GST_DEBUG_CATEGORY_INIT (dvdemux_debug, "dvdemux", 0, "DV demuxer element");
-}
-
-static void
gst_dvdemux_class_init (GstDVDemuxClass * klass)
{
GObjectClass *gobject_class;
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_dvdemux_change_state);
gstelement_class->send_event = GST_DEBUG_FUNCPTR (gst_dvdemux_send_event);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_temp));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&video_src_temp));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&audio_src_temp));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "DV system stream demuxer", "Codec/Demuxer",
+ "Uses libdv to separate DV audio from DV video (libdv.sourceforge.net)",
+ "Erik Walthinsen <omega@cse.ogi.edu>, Wim Taymans <wim@fluendo.com>");
+
+ GST_DEBUG_CATEGORY_INIT (dvdemux_debug, "dvdemux", 0, "DV demuxer element");
}
static void
-gst_dvdemux_init (GstDVDemux * dvdemux, GstDVDemuxClass * g_class)
+gst_dvdemux_init (GstDVDemux * dvdemux)
{
gint i;
gst_pad_set_activate_function (dvdemux->sinkpad,
GST_DEBUG_FUNCPTR (gst_dvdemux_sink_activate));
/* the function to activate in push mode */
- gst_pad_set_activatepush_function (dvdemux->sinkpad,
- GST_DEBUG_FUNCPTR (gst_dvdemux_sink_activate_push));
- /* the function to activate in pull mode */
- gst_pad_set_activatepull_function (dvdemux->sinkpad,
- GST_DEBUG_FUNCPTR (gst_dvdemux_sink_activate_pull));
+ gst_pad_set_activatemode_function (dvdemux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_dvdemux_sink_activate_mode));
/* for push mode, this is the chain function */
gst_pad_set_chain_function (dvdemux->sinkpad,
GST_DEBUG_FUNCPTR (gst_dvdemux_chain));
/* query functions */
gst_pad_set_query_function (dvdemux->sinkpad,
GST_DEBUG_FUNCPTR (gst_dvdemux_sink_query));
- gst_pad_set_query_type_function (dvdemux->sinkpad,
- GST_DEBUG_FUNCPTR (gst_dvdemux_get_sink_query_types));
/* now add the pad */
gst_element_add_pad (GST_ELEMENT (dvdemux), dvdemux->sinkpad);
gst_pad_set_query_function (pad, GST_DEBUG_FUNCPTR (gst_dvdemux_src_query));
- gst_pad_set_query_type_function (pad,
- GST_DEBUG_FUNCPTR (gst_dvdemux_get_src_query_types));
gst_pad_set_event_function (pad,
GST_DEBUG_FUNCPTR (gst_dvdemux_handle_src_event));
gst_pad_use_fixed_caps (pad);
if (no_more_pads)
gst_element_no_more_pads (GST_ELEMENT (dvdemux));
- gst_pad_push_event (pad, gst_event_new_new_segment (FALSE,
- dvdemux->byte_segment.rate, GST_FORMAT_TIME,
- dvdemux->time_segment.start, dvdemux->time_segment.stop,
- dvdemux->time_segment.start));
+ gst_pad_push_event (pad, gst_event_new_segment (&dvdemux->time_segment));
if (no_more_pads) {
- gst_element_found_tags (GST_ELEMENT (dvdemux),
- gst_tag_list_new_full (GST_TAG_CONTAINER_FORMAT, "DV", NULL));
+ gst_pad_push_event (pad,
+ gst_event_new_tag (gst_tag_list_new (GST_TAG_CONTAINER_FORMAT, "DV",
+ NULL)));
}
return pad;
}
}
-static const GstQueryType *
-gst_dvdemux_get_src_query_types (GstPad * pad)
-{
- static const GstQueryType src_query_types[] = {
- GST_QUERY_POSITION,
- GST_QUERY_DURATION,
- GST_QUERY_CONVERT,
- 0
- };
-
- return src_query_types;
-}
-
static gboolean
-gst_dvdemux_src_query (GstPad * pad, GstQuery * query)
+gst_dvdemux_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
gboolean res = TRUE;
GstDVDemux *dvdemux;
- dvdemux = GST_DVDEMUX (gst_pad_get_parent (pad));
+ dvdemux = GST_DVDEMUX (parent);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_POSITION:
/* bring the position to the requested format. */
if (!(res = gst_dvdemux_src_convert (dvdemux, pad,
- GST_FORMAT_TIME, dvdemux->time_segment.last_stop,
+ GST_FORMAT_TIME, dvdemux->time_segment.position,
&format, &cur)))
goto error;
gst_query_set_position (query, format, cur);
break;
}
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
- gst_object_unref (dvdemux);
return res;
/* ERRORS */
error:
{
- gst_object_unref (dvdemux);
GST_DEBUG ("error source query");
return FALSE;
}
}
-static const GstQueryType *
-gst_dvdemux_get_sink_query_types (GstPad * pad)
-{
- static const GstQueryType sink_query_types[] = {
- GST_QUERY_CONVERT,
- 0
- };
-
- return sink_query_types;
-}
-
static gboolean
-gst_dvdemux_sink_query (GstPad * pad, GstQuery * query)
+gst_dvdemux_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
gboolean res = TRUE;
GstDVDemux *dvdemux;
- dvdemux = GST_DVDEMUX (gst_pad_get_parent (pad));
+ dvdemux = GST_DVDEMUX (parent);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONVERT:
break;
}
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
- gst_object_unref (dvdemux);
return res;
/* ERRORS */
error:
{
- gst_object_unref (dvdemux);
GST_DEBUG ("error handling sink query");
return FALSE;
}
}
static gboolean
-gst_dvdemux_handle_sink_event (GstPad * pad, GstEvent * event)
+gst_dvdemux_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
- GstDVDemux *dvdemux = GST_DVDEMUX (gst_pad_get_parent (pad));
+ GstDVDemux *dvdemux = GST_DVDEMUX (parent);
gboolean res = TRUE;
switch (GST_EVENT_TYPE (event)) {
gst_segment_init (&dvdemux->time_segment, GST_FORMAT_TIME);
res = gst_dvdemux_push_event (dvdemux, event);
break;
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- gboolean update;
- gdouble rate;
- GstFormat format;
- gint64 start, stop, time;
+ const GstSegment *segment;
- /* parse byte start and stop positions */
- gst_event_parse_new_segment (event, &update, &rate, &format,
- &start, &stop, &time);
-
- switch (format) {
+ gst_event_parse_segment (event, &segment);
+ switch (segment->format) {
case GST_FORMAT_BYTES:
- gst_segment_set_newsegment (&dvdemux->byte_segment, update,
- rate, format, start, stop, time);
+ gst_segment_copy_into (segment, &dvdemux->byte_segment);
+#if 0
+ /* FIXME ?? no longer such subtle distinction in 0.11 */
/* the update can always be sent */
if (update) {
GstEvent *update;
update = gst_event_new_new_segment (TRUE,
dvdemux->time_segment.rate, dvdemux->time_segment.format,
- dvdemux->time_segment.start, dvdemux->time_segment.last_stop,
+ dvdemux->time_segment.start, dvdemux->time_segment.position,
dvdemux->time_segment.time);
gst_dvdemux_push_event (dvdemux, update);
* frames, etc.. */
dvdemux->need_segment = TRUE;
}
+#endif
+ dvdemux->need_segment = TRUE;
gst_event_unref (event);
break;
case GST_FORMAT_TIME:
- gst_segment_set_newsegment (&dvdemux->time_segment, update,
- rate, format, start, stop, time);
+ gst_segment_copy_into (segment, &dvdemux->time_segment);
/* and we can just forward this time event */
res = gst_dvdemux_push_event (dvdemux, event);
/* and clear the adapter */
gst_adapter_clear (dvdemux->adapter);
break;
+ case GST_EVENT_CAPS:
+ gst_event_unref (event);
+ break;
default:
res = gst_dvdemux_push_event (dvdemux, event);
break;
}
- gst_object_unref (dvdemux);
-
return res;
}
* timestamp can be found. */
format = GST_FORMAT_BYTES;
res = gst_dvdemux_sink_convert (demux,
- segment->format, segment->last_stop,
- &format, &demux->byte_segment.last_stop);
+ segment->format, segment->position,
+ &format, (gint64 *) & demux->byte_segment.position);
if (!res)
goto done;
/* update byte segment start */
gst_dvdemux_sink_convert (demux,
- segment->format, segment->start, &format, &demux->byte_segment.start);
+ segment->format, segment->start, &format,
+ (gint64 *) & demux->byte_segment.start);
/* update byte segment stop */
gst_dvdemux_sink_convert (demux,
- segment->format, segment->stop, &format, &demux->byte_segment.stop);
+ segment->format, segment->stop, &format,
+ (gint64 *) & demux->byte_segment.stop);
/* update byte segment time */
gst_dvdemux_sink_convert (demux,
- segment->format, segment->time, &format, &demux->byte_segment.time);
+ segment->format, segment->time, &format,
+ (gint64 *) & demux->byte_segment.time);
/* calculate current frame number */
format = GST_FORMAT_DEFAULT;
/* now configure the seek segment */
if (event) {
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
}
GST_DEBUG_OBJECT (demux, "segment configured from %" G_GINT64_FORMAT
" to %" G_GINT64_FORMAT ", position %" G_GINT64_FORMAT,
- seeksegment.start, seeksegment.stop, seeksegment.last_stop);
+ seeksegment.start, seeksegment.stop, seeksegment.position);
- /* do the seek, segment.last_stop contains new position. */
+ /* do the seek, segment.position contains new position. */
res = gst_dvdemux_do_seek (demux, &seeksegment);
/* and prepare to continue streaming */
if (flush) {
/* send flush stop, peer will accept data and events again. We
* are not yet providing data as we still have the STREAM_LOCK. */
- gst_dvdemux_push_event (demux, gst_event_new_flush_stop ());
- } else if (res && demux->running) {
- /* we are running the current segment and doing a non-flushing seek,
- * close the segment first based on the last_stop. */
- GST_DEBUG_OBJECT (demux, "closing running segment %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, demux->time_segment.start,
- demux->time_segment.last_stop);
-
- gst_dvdemux_push_event (demux,
- gst_event_new_new_segment (TRUE,
- demux->time_segment.rate, demux->time_segment.format,
- demux->time_segment.start, demux->time_segment.last_stop,
- demux->time_segment.time));
+ gst_dvdemux_push_event (demux, gst_event_new_flush_stop (TRUE));
}
/* if successfull seek, we update our real segment and push
if (demux->time_segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT_CAST (demux),
gst_message_new_segment_start (GST_OBJECT_CAST (demux),
- demux->time_segment.format, demux->time_segment.last_stop));
+ demux->time_segment.format, demux->time_segment.position));
}
if ((stop = demux->time_segment.stop) == -1)
stop = demux->time_segment.duration;
if (demux->pending_segment)
gst_event_unref (demux->pending_segment);
- demux->pending_segment = gst_event_new_new_segment (FALSE,
- demux->time_segment.rate, demux->time_segment.format,
- demux->time_segment.last_stop, stop, demux->time_segment.time);
+ demux->pending_segment = gst_event_new_segment (&demux->time_segment);
demux->need_segment = FALSE;
}
- demux->running = TRUE;
/* and restart the task in case it got paused explicitely or by
* the FLUSH_START event we pushed out. */
gst_pad_start_task (demux->sinkpad, (GstTaskFunction) gst_dvdemux_loop,
/* handle an event on the source pad, it's most likely a seek */
static gboolean
-gst_dvdemux_handle_src_event (GstPad * pad, GstEvent * event)
+gst_dvdemux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
gboolean res = TRUE;
GstDVDemux *dvdemux;
- dvdemux = GST_DVDEMUX (gst_pad_get_parent (pad));
+ dvdemux = GST_DVDEMUX (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:
res = FALSE;
break;
case GST_EVENT_NAVIGATION:
- /* no navigation either... */
+ case GST_EVENT_CAPS:
+ /* no navigation or caps either... */
res = FALSE;
break;
default:
if (event)
gst_event_unref (event);
- gst_object_unref (dvdemux);
-
return res;
}
{
gint num_samples;
GstFlowReturn ret;
- const guint8 *data;
+ GstMapInfo map;
- data = GST_BUFFER_DATA (buffer);
-
- dv_decode_full_audio (dvdemux->decoder, data, dvdemux->audio_buffers);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ dv_decode_full_audio (dvdemux->decoder, map.data, dvdemux->audio_buffers);
+ gst_buffer_unmap (buffer, &map);
if (G_LIKELY ((num_samples = dv_get_num_samples (dvdemux->decoder)) > 0)) {
gint16 *a_ptr;
if (G_UNLIKELY ((frequency != dvdemux->frequency)
|| (channels != dvdemux->channels))) {
GstCaps *caps;
+ GstAudioInfo info;
dvdemux->frequency = frequency;
dvdemux->channels = channels;
- /* and set new caps */
- caps = gst_caps_new_simple ("audio/x-raw-int",
- "rate", G_TYPE_INT, frequency,
- "depth", G_TYPE_INT, 16,
- "width", G_TYPE_INT, 16,
- "signed", G_TYPE_BOOLEAN, TRUE,
- "channels", G_TYPE_INT, channels,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
+ gst_audio_info_init (&info);
+ gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16LE,
+ frequency, channels, NULL);
+ caps = gst_audio_info_to_caps (&info);
gst_pad_set_caps (dvdemux->audiosrcpad, caps);
gst_caps_unref (caps);
}
outbuf = gst_buffer_new_and_alloc (num_samples *
sizeof (gint16) * dvdemux->channels);
- a_ptr = (gint16 *) GST_BUFFER_DATA (outbuf);
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ a_ptr = (gint16 *) map.data;
for (i = 0; i < num_samples; i++) {
for (j = 0; j < dvdemux->channels; j++) {
*(a_ptr++) = dvdemux->audio_buffers[j][i];
}
}
+ gst_buffer_unmap (outbuf, &map);
GST_DEBUG ("pushing audio %" GST_TIME_FORMAT,
- GST_TIME_ARGS (dvdemux->time_segment.last_stop));
+ GST_TIME_ARGS (dvdemux->time_segment.position));
- GST_BUFFER_TIMESTAMP (outbuf) = dvdemux->time_segment.last_stop;
+ GST_BUFFER_TIMESTAMP (outbuf) = dvdemux->time_segment.position;
GST_BUFFER_DURATION (outbuf) = duration;
GST_BUFFER_OFFSET (outbuf) = dvdemux->audio_offset;
dvdemux->audio_offset += num_samples;
if (dvdemux->new_media)
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (dvdemux->audiosrcpad));
ret = gst_pad_push (dvdemux->audiosrcpad, outbuf);
} else {
/* takes ownership of buffer here, we just need to modify
* the metadata. */
- outbuf = gst_buffer_make_metadata_writable (buffer);
+ outbuf = gst_buffer_make_writable (buffer);
- GST_BUFFER_TIMESTAMP (outbuf) = dvdemux->time_segment.last_stop;
+ GST_BUFFER_TIMESTAMP (outbuf) = dvdemux->time_segment.position;
GST_BUFFER_OFFSET (outbuf) = dvdemux->video_offset;
GST_BUFFER_OFFSET_END (outbuf) = dvdemux->video_offset + 1;
GST_BUFFER_DURATION (outbuf) = duration;
if (dvdemux->new_media)
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (dvdemux->videosrcpad));
GST_DEBUG ("pushing video %" GST_TIME_FORMAT,
- GST_TIME_ARGS (dvdemux->time_segment.last_stop));
+ GST_TIME_ARGS (dvdemux->time_segment.position));
ret = gst_pad_push (dvdemux->videosrcpad, outbuf);
gst_dvdemux_get_timecode (GstDVDemux * dvdemux, GstBuffer * buffer,
GstSMPTETimeCode * timecode)
{
- guint8 *data = GST_BUFFER_DATA (buffer);
+ guint8 *data;
+ GstMapInfo map;
int offset;
int dif;
int n_difs = dvdemux->decoder->num_dif_seqs;
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
for (dif = 0; dif < n_difs; dif++) {
offset = get_ssyb_offset (dif, 3);
if (data[offset + 3] == 0x13) {
(data[offset + 7] & 0xf);
GST_DEBUG ("got timecode %" GST_SMPTE_TIME_CODE_FORMAT,
GST_SMPTE_TIME_CODE_ARGS (timecode));
+ gst_buffer_unmap (buffer, &map);
return TRUE;
}
}
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
static gboolean
gst_dvdemux_is_new_media (GstDVDemux * dvdemux, GstBuffer * buffer)
{
- guint8 *data = GST_BUFFER_DATA (buffer);
+ guint8 *data;
+ GstMapInfo map;
int aaux_offset;
int dif;
int n_difs;
n_difs = dvdemux->decoder->num_dif_seqs;
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
for (dif = 0; dif < n_difs; dif++) {
if (dif & 1) {
aaux_offset = (dif * 12000) + (6 + 16 * 1) * 80 + 3;
aaux_offset = (dif * 12000) + (6 + 16 * 4) * 80 + 3;
}
if (data[aaux_offset + 0] == 0x51) {
- if ((data[aaux_offset + 2] & 0x80) == 0)
+ if ((data[aaux_offset + 2] & 0x80) == 0) {
+ gst_buffer_unmap (buffer, &map);
return TRUE;
+ }
}
}
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
{
GstClockTime next_ts;
GstFlowReturn aret, vret, ret;
- guint8 *data;
+ GstMapInfo map;
guint64 duration;
GstSMPTETimeCode timecode;
int frame_number;
if (G_UNLIKELY (dvdemux->need_segment)) {
- GstEvent *event;
GstFormat format;
/* convert to time and store as start/end_timestamp */
if (!(gst_dvdemux_convert_sink_pair (dvdemux,
GST_FORMAT_BYTES, dvdemux->byte_segment.start,
dvdemux->byte_segment.stop, format,
- &dvdemux->time_segment.start, &dvdemux->time_segment.stop)))
+ (gint64 *) & dvdemux->time_segment.start,
+ (gint64 *) & dvdemux->time_segment.stop)))
goto segment_error;
dvdemux->time_segment.rate = dvdemux->byte_segment.rate;
- dvdemux->time_segment.abs_rate = dvdemux->byte_segment.abs_rate;
- dvdemux->time_segment.last_stop = dvdemux->time_segment.start;
+ dvdemux->time_segment.position = dvdemux->time_segment.start;
/* calculate current frame number */
format = GST_FORMAT_DEFAULT;
GST_TIME_ARGS (dvdemux->time_segment.stop),
GST_TIME_ARGS (dvdemux->time_segment.start));
- event = gst_event_new_new_segment (FALSE, dvdemux->byte_segment.rate,
- GST_FORMAT_TIME, dvdemux->time_segment.start,
- dvdemux->time_segment.stop, dvdemux->time_segment.start);
- gst_dvdemux_push_event (dvdemux, event);
+ gst_dvdemux_push_event (dvdemux,
+ gst_event_new_segment (&dvdemux->time_segment));
dvdemux->need_segment = FALSE;
}
next_ts = gst_util_uint64_scale_int (
(dvdemux->frame_offset + 1) * GST_SECOND,
dvdemux->framerate_denominator, dvdemux->framerate_numerator);
- duration = next_ts - dvdemux->time_segment.last_stop;
-
- data = GST_BUFFER_DATA (buffer);
+ duration = next_ts - dvdemux->time_segment.position;
- dv_parse_packs (dvdemux->decoder, data);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ dv_parse_packs (dvdemux->decoder, map.data);
+ gst_buffer_unmap (buffer, &map);
dvdemux->new_media = FALSE;
if (gst_dvdemux_is_new_media (dvdemux, buffer) &&
dvdemux->frames_since_new_media > 2) {
goto done;
}
- gst_segment_set_last_stop (&dvdemux->time_segment, GST_FORMAT_TIME, next_ts);
+ dvdemux->time_segment.position = next_ts;
dvdemux->frame_offset++;
/* check for the end of the segment */
if (dvdemux->time_segment.stop != -1 && next_ts > dvdemux->time_segment.stop)
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
else
ret = GST_FLOW_OK;
gint length;
/* get the accumulated bytes */
- data = gst_adapter_peek (dvdemux->adapter, dvdemux->frame_len);
+ data = gst_adapter_map (dvdemux->adapter, dvdemux->frame_len);
/* parse header to know the length and other params */
- if (G_UNLIKELY (dv_parse_header (dvdemux->decoder, data) < 0))
+ if (G_UNLIKELY (dv_parse_header (dvdemux->decoder, data) < 0)) {
+ gst_adapter_unmap (dvdemux->adapter);
goto parse_header_error;
+ }
+ gst_adapter_unmap (dvdemux->adapter);
/* after parsing the header we know the length of the data */
length = dvdemux->frame_len = dvdemux->decoder->frame_size;
if (G_LIKELY (gst_adapter_available (dvdemux->adapter) >= length)) {
GstBuffer *buffer;
- data = gst_adapter_take (dvdemux->adapter, length);
-
- /* create buffer for the remainder of the code */
- buffer = gst_buffer_new ();
- GST_BUFFER_DATA (buffer) = (guint8 *) data;
- GST_BUFFER_SIZE (buffer) = length;
- GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) data;
+ buffer = gst_adapter_take_buffer (dvdemux->adapter, length);
/* and decode the buffer, takes ownership */
ret = gst_dvdemux_demux_frame (dvdemux, buffer);
* accumulate data until we have a frame, then decode.
*/
static GstFlowReturn
-gst_dvdemux_chain (GstPad * pad, GstBuffer * buffer)
+gst_dvdemux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstDVDemux *dvdemux;
GstFlowReturn ret;
GstClockTime timestamp;
- dvdemux = GST_DVDEMUX (gst_pad_get_parent (pad));
+ dvdemux = GST_DVDEMUX (parent);
/* a discontinuity in the stream, we need to get rid of
* accumulated data in the adapter and assume a new frame
/* a timestamp always should be respected */
timestamp = GST_BUFFER_TIMESTAMP (buffer);
if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
- gst_segment_set_last_stop (&dvdemux->time_segment, GST_FORMAT_TIME,
- timestamp);
+ dvdemux->time_segment.position = timestamp;
/* FIXME, adjust frame_offset and other counters */
}
/* and try to flush pending frames */
ret = gst_dvdemux_flush (dvdemux);
- gst_object_unref (dvdemux);
-
return ret;
}
GstFlowReturn ret;
GstDVDemux *dvdemux;
GstBuffer *buffer = NULL;
- const guint8 *data;
+ GstMapInfo map;
dvdemux = GST_DVDEMUX (gst_pad_get_parent (pad));
/* pull in NTSC sized buffer to figure out the frame
* length */
ret = gst_pad_pull_range (dvdemux->sinkpad,
- dvdemux->byte_segment.last_stop, NTSC_BUFFER, &buffer);
+ dvdemux->byte_segment.position, NTSC_BUFFER, &buffer);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto pause;
/* check buffer size, don't want to read small buffers */
- if (G_UNLIKELY (GST_BUFFER_SIZE (buffer) < NTSC_BUFFER))
+ if (G_UNLIKELY (gst_buffer_get_size (buffer) < NTSC_BUFFER))
goto small_buffer;
- data = GST_BUFFER_DATA (buffer);
-
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
/* parse header to know the length and other params */
- if (G_UNLIKELY (dv_parse_header (dvdemux->decoder, data) < 0))
+ if (G_UNLIKELY (dv_parse_header (dvdemux->decoder, map.data) < 0)) {
+ gst_buffer_unmap (buffer, &map);
goto parse_header_error;
+ }
+ gst_buffer_unmap (buffer, &map);
/* after parsing the header we know the length of the data */
dvdemux->frame_len = dvdemux->decoder->frame_size;
if (G_LIKELY (buffer == NULL)) {
GST_DEBUG_OBJECT (dvdemux, "pulling buffer at offset %" G_GINT64_FORMAT,
- dvdemux->byte_segment.last_stop);
+ dvdemux->byte_segment.position);
ret = gst_pad_pull_range (dvdemux->sinkpad,
- dvdemux->byte_segment.last_stop, dvdemux->frame_len, &buffer);
+ dvdemux->byte_segment.position, dvdemux->frame_len, &buffer);
if (ret != GST_FLOW_OK)
goto pause;
/* check buffer size, don't want to read small buffers */
- if (GST_BUFFER_SIZE (buffer) < dvdemux->frame_len)
+ if (gst_buffer_get_size (buffer) < dvdemux->frame_len)
goto small_buffer;
}
/* and decode the buffer */
goto pause;
/* and position ourselves for the next buffer */
- dvdemux->byte_segment.last_stop += dvdemux->frame_len;
+ dvdemux->byte_segment.position += dvdemux->frame_len;
done:
gst_object_unref (dvdemux);
GST_ELEMENT_ERROR (dvdemux, STREAM, DECODE,
(NULL), ("Error parsing DV header"));
gst_buffer_unref (buffer);
- dvdemux->running = FALSE;
gst_pad_pause_task (dvdemux->sinkpad);
gst_dvdemux_push_event (dvdemux, gst_event_new_eos ());
goto done;
GST_ELEMENT_ERROR (dvdemux, STREAM, DECODE,
(NULL), ("Error reading buffer"));
gst_buffer_unref (buffer);
- dvdemux->running = FALSE;
gst_pad_pause_task (dvdemux->sinkpad);
gst_dvdemux_push_event (dvdemux, gst_event_new_eos ());
goto done;
pause:
{
GST_INFO_OBJECT (dvdemux, "pausing task, %s", gst_flow_get_name (ret));
- dvdemux->running = FALSE;
gst_pad_pause_task (dvdemux->sinkpad);
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
GST_LOG_OBJECT (dvdemux, "got eos");
+ /* so align our position with the end of it, if there is one
+ * this ensures a subsequent will arrive at correct base/acc time */
+ if (dvdemux->time_segment.rate > 0.0 &&
+ GST_CLOCK_TIME_IS_VALID (dvdemux->time_segment.stop))
+ dvdemux->time_segment.position = dvdemux->time_segment.stop;
+ else if (dvdemux->time_segment.rate < 0.0)
+ dvdemux->time_segment.position = dvdemux->time_segment.start;
/* perform EOS logic */
if (dvdemux->time_segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT (dvdemux),
gst_message_new_segment_done (GST_OBJECT_CAST (dvdemux),
- dvdemux->time_segment.format, dvdemux->time_segment.last_stop));
+ dvdemux->time_segment.format, dvdemux->time_segment.position));
} else {
gst_dvdemux_push_event (dvdemux, gst_event_new_eos ());
}
- } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
/* for fatal errors or not-linked we post an error message */
GST_ELEMENT_ERROR (dvdemux, STREAM, FAILED,
(NULL), ("streaming stopped, reason %s", gst_flow_get_name (ret)));
}
static gboolean
-gst_dvdemux_sink_activate_push (GstPad * sinkpad, gboolean active)
+gst_dvdemux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
{
- GstDVDemux *demux = GST_DVDEMUX (gst_pad_get_parent (sinkpad));
-
- if (active) {
- demux->seek_handler = gst_dvdemux_handle_push_seek;
- } else {
- demux->seek_handler = NULL;
+ gboolean res;
+ GstDVDemux *demux = GST_DVDEMUX (parent);
+
+ switch (mode) {
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ demux->seek_handler = gst_dvdemux_handle_pull_seek;
+ res = gst_pad_start_task (sinkpad,
+ (GstTaskFunction) gst_dvdemux_loop, sinkpad);
+ } else {
+ demux->seek_handler = NULL;
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ case GST_PAD_MODE_PUSH:
+ if (active) {
+ GST_DEBUG_OBJECT (demux, "activating push/chain function");
+ demux->seek_handler = gst_dvdemux_handle_push_seek;
+ } else {
+ GST_DEBUG_OBJECT (demux, "deactivating push/chain function");
+ demux->seek_handler = NULL;
+ }
+ res = TRUE;
+ break;
+ default:
+ res = FALSE;
+ break;
}
- gst_object_unref (demux);
-
- return TRUE;
+ return res;
}
+/* decide on push or pull based scheduling */
static gboolean
-gst_dvdemux_sink_activate_pull (GstPad * sinkpad, gboolean active)
+gst_dvdemux_sink_activate (GstPad * sinkpad, GstObject * parent)
{
- GstDVDemux *demux = GST_DVDEMUX (gst_pad_get_parent (sinkpad));
+ GstQuery *query;
+ gboolean pull_mode;
- if (active) {
- demux->running = TRUE;
- demux->seek_handler = gst_dvdemux_handle_pull_seek;
- gst_pad_start_task (sinkpad, (GstTaskFunction) gst_dvdemux_loop, sinkpad);
- } else {
- demux->seek_handler = NULL;
- gst_pad_stop_task (sinkpad);
- demux->running = FALSE;
- }
+ query = gst_query_new_scheduling ();
- gst_object_unref (demux);
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
- return TRUE;
-};
+ pull_mode = gst_query_has_scheduling_mode (query, GST_PAD_MODE_PULL);
+ gst_query_unref (query);
-/* decide on push or pull based scheduling */
-static gboolean
-gst_dvdemux_sink_activate (GstPad * sinkpad)
-{
- gboolean ret;
+ if (!pull_mode)
+ goto activate_push;
- if (gst_pad_check_pull_range (sinkpad))
- ret = gst_pad_activate_pull (sinkpad, TRUE);
- else
- ret = gst_pad_activate_push (sinkpad, TRUE);
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
- return ret;
-};
+activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+ }
+}
static GstStateChangeReturn
gst_dvdemux_change_state (GstElement * element, GstStateChange transition)
break;
}
- ret = parent_class->change_state (element, transition);
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
GstDVDemuxSeekHandler seek_handler;
GstSegment byte_segment;
GstSegment time_segment;
- gboolean running;
gboolean need_segment;
gboolean new_media;
int frames_since_new_media;
+++ /dev/null
-plugin_LTLIBRARIES = libgstesd.la
-
-libgstesd_la_SOURCES = esdsink.c gstesd.c
-libgstesd_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(ESD_CFLAGS)
-libgstesd_la_LIBADD = \
- $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) \
- $(GST_BASE_LIBS) \
- $(GST_LIBS) \
- $(ESD_LIBS)
-libgstesd_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
-libgstesd_la_LIBTOOLFLAGS = --tag=disable-static
-
-noinst_HEADERS = esdsink.h esdmon.h
-EXTRA_DIST =
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2001,2002> Richard Boulton <richard-gst@tartarus.org>
- *
- * Based on example.c:
- * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/**
- * SECTION:element-esdmon
- * @see_also: #GstAlsaSrc, #GstAutoAudioSrc
- *
- * This element records sound from an already-running Enlightened Sound Daemon
- * (ESound Daemon, esd). Note that a sound daemon will never be auto-spawned
- * through this element (regardless of the system configuration), since this
- * is actively prevented by the element. If you must use esd, you need to
- * make sure it is started automatically with your session or otherwise.
- *
- * TODO: insert some comments about how sucky esd is and that all the cool
- * kids use pulseaudio or whatever these days.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch esdmon ! audioconvert ! waveenc ! filesink location=record.wav
- * ]| Record from audioinput
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-#include "esdmon.h"
-#include <esd.h>
-#include <unistd.h>
-
-/* Signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
-};
-
-enum
-{
- ARG_0,
- ARG_DEPTH,
- ARG_BYTESPERREAD,
- ARG_CUROFFSET,
- ARG_CHANNELS,
- ARG_RATE,
- ARG_HOST
-};
-
-static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) " G_STRINGIFY (G_BYTE_ORDER) ", "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = [ 8000, 96000 ], "
- "channels = [ 1, 2 ]; "
- "audio/x-raw-int, "
- "signed = (boolean) FALSE, "
- "width = (int) 8, "
- "depth = (int) 8, " "rate = [ 8000, 96000 ], " "channels = [ 1, 2 ]")
- );
-
-static void gst_esdmon_base_init (gpointer g_class);
-static void gst_esdmon_class_init (gpointer g_class, gpointer class_data);
-static void gst_esdmon_init (GTypeInstance * instance, gpointer g_class);
-
-static gboolean gst_esdmon_open_audio (GstEsdmon * src);
-static void gst_esdmon_close_audio (GstEsdmon * src);
-static GstStateChangeReturn gst_esdmon_change_state (GstElement * element,
- GstStateChange transition);
-static gboolean gst_esdmon_sync_parms (GstEsdmon * esdmon);
-
-static GstData *gst_esdmon_get (GstPad * pad);
-
-static void gst_esdmon_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_esdmon_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-#define GST_TYPE_ESDMON_DEPTHS (gst_esdmon_depths_get_type())
-static GType
-gst_esdmon_depths_get_type (void)
-{
- static GType esdmon_depths_type = 0;
- static const GEnumValue esdmon_depths[] = {
- {8, "8 Bits", "8"},
- {16, "16 Bits", "16"},
- {0, NULL, NULL},
- };
-
- if (!esdmon_depths_type) {
- esdmon_depths_type =
- g_enum_register_static ("GstEsdmonDepths", esdmon_depths);
- }
- return esdmon_depths_type;
-}
-
-#define GST_TYPE_ESDMON_CHANNELS (gst_esdmon_channels_get_type())
-static GType
-gst_esdmon_channels_get_type (void)
-{
- static GType esdmon_channels_type = 0;
- static const GEnumValue esdmon_channels[] = {
- {1, "Mono", "mono"},
- {2, "Stereo", "stereo"},
- {0, NULL, NULL},
- };
-
- if (!esdmon_channels_type) {
- esdmon_channels_type =
- g_enum_register_static ("GstEsdmonChannels", esdmon_channels);
- }
- return esdmon_channels_type;
-}
-
-
-static GstElementClass *parent_class = NULL;
-
-/*static guint gst_esdmon_signals[LAST_SIGNAL] = { 0 }; */
-
-GType
-gst_esdmon_get_type (void)
-{
- static GType esdmon_type = 0;
-
- if (!esdmon_type) {
- static const GTypeInfo esdmon_info = {
- sizeof (GstEsdmonClass),
- gst_esdmon_base_init,
- NULL,
- gst_esdmon_class_init,
- NULL,
- NULL,
- sizeof (GstEsdmon),
- 0,
- gst_esdmon_init,
- };
-
- esdmon_type =
- g_type_register_static (GST_TYPE_ELEMENT, "GstEsdmon", &esdmon_info, 0);
- }
- return esdmon_type;
-}
-
-static void
-gst_esdmon_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_set_details_simple (element_class, "Esound audio monitor",
- "Source/Audio",
- "Monitors audio from an esound server",
- "Richard Boulton <richard-gst@tartarus.org>");
-}
-
-static void
-gst_esdmon_class_init (gpointer g_class, gpointer class_data)
-{
- GObjectClass *gobject_class = G_OBJECT_CLASS (g_class);
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- parent_class = g_type_class_peek_parent (g_class);
-
- /* FIXME: add long property descriptions */
- g_object_class_install_property (gobject_class, ARG_BYTESPERREAD,
- g_param_spec_ulong ("bytes-per-read", "bytes per read", "bytes per read",
- 0, G_MAXULONG, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, ARG_CUROFFSET,
- g_param_spec_ulong ("curoffset", "curoffset", "curoffset",
- 0, G_MAXULONG, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, ARG_DEPTH,
- g_param_spec_enum ("depth", "depth", "depth", GST_TYPE_ESDMON_DEPTHS,
- 16, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, ARG_CHANNELS,
- g_param_spec_enum ("channels", "channels", "channels",
- GST_TYPE_ESDMON_CHANNELS, 2,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, ARG_RATE,
- g_param_spec_int ("frequency", "frequency", "frequency",
- G_MININT, G_MAXINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, ARG_HOST,
- g_param_spec_string ("host", "host", "host", NULL,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
- gobject_class->set_property = gst_esdmon_set_property;
- gobject_class->get_property = gst_esdmon_get_property;
-
- gstelement_class->change_state = gst_esdmon_change_state;
-}
-
-static void
-gst_esdmon_init (GTypeInstance * instance, gpointer g_class)
-{
- GstEsdmon *esdmon = GST_ESDMON (instance);
-
- esdmon->srcpad =
- gst_pad_new_from_template (gst_element_class_get_pad_template
- (GST_ELEMENT_GET_CLASS (esdmon), "src"), "src");
- gst_pad_set_get_function (esdmon->srcpad, gst_esdmon_get);
- gst_pad_use_explicit_caps (esdmon->srcpad);
- gst_element_add_pad (GST_ELEMENT (esdmon), esdmon->srcpad);
-
- esdmon->fd = -1;
- /* FIXME: get default from somewhere better than just putting them inline. */
- esdmon->depth = 16;
- esdmon->channels = 2;
- esdmon->frequency = 44100;
- esdmon->host = NULL;
- esdmon->bytes_per_read = 4096;
- esdmon->curoffset = 0;
- esdmon->basetime = 0;
- esdmon->samples_since_basetime = 0;
-}
-
-static gboolean
-gst_esdmon_sync_parms (GstEsdmon * esdmon)
-{
- g_return_val_if_fail (esdmon != NULL, FALSE);
- g_return_val_if_fail (GST_IS_ESDMON (esdmon), FALSE);
-
- if (esdmon->fd == -1)
- return TRUE;
-
- /* Need to set fd to use new parameters: only way to do this is to reopen. */
- gst_esdmon_close_audio (esdmon);
- return gst_esdmon_open_audio (esdmon);
-}
-
-static GstData *
-gst_esdmon_get (GstPad * pad)
-{
- GstEsdmon *esdmon;
- GstBuffer *buf;
- glong readbytes;
- glong readsamples;
-
- g_return_val_if_fail (pad != NULL, NULL);
- esdmon = GST_ESDMON (gst_pad_get_parent (pad));
-
- GST_DEBUG ("attempting to read something from esdmon");
-
- buf = gst_buffer_new ();
- g_return_val_if_fail (buf, NULL);
-
- GST_BUFFER_DATA (buf) = (gpointer) g_malloc (esdmon->bytes_per_read);
-
- readbytes = read (esdmon->fd, GST_BUFFER_DATA (buf), esdmon->bytes_per_read);
-
- if (readbytes == 0) {
- gst_element_set_eos (GST_ELEMENT (esdmon));
- return NULL;
- }
- if (!GST_PAD_CAPS (pad)) {
- GstCaps *caps = gst_caps_new_simple ("audio/x-raw-int",
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, esdmon->depth == 8 ? FALSE : TRUE,
- "width", G_TYPE_INT, esdmon->depth,
- "depth", G_TYPE_INT, esdmon->depth,
- "rate", G_TYPE_INT, esdmon->frequency,
- "channels", G_TYPE_INT, esdmon->channels,
- NULL);
-
- /* set caps on src pad */
- if (gst_pad_set_explicit_caps (esdmon->srcpad, caps) <= 0) {
- GST_ELEMENT_ERROR (esdmon, CORE, NEGOTIATION, (NULL), (NULL));
- gst_caps_free (caps);
- return NULL;
- }
- gst_caps_free (caps);
- }
-
- GST_BUFFER_SIZE (buf) = readbytes;
- GST_BUFFER_OFFSET (buf) = esdmon->curoffset;
- GST_BUFFER_TIMESTAMP (buf) = esdmon->basetime +
- esdmon->samples_since_basetime * GST_SECOND / esdmon->frequency;
-
- esdmon->curoffset += readbytes;
- readsamples = readbytes / esdmon->channels;
- if (esdmon->depth == 16)
- readsamples /= 2;
- esdmon->samples_since_basetime += readsamples;
-
- GST_DEBUG ("pushed buffer from esdmon of %ld bytes, timestamp %"
- G_GINT64_FORMAT, readbytes, GST_BUFFER_TIMESTAMP (buf));
- gst_object_unref (esdmon);
- return GST_DATA (buf);
-}
-
-static void
-gst_esdmon_set_property (GObject * object, guint prop_id, const GValue * value,
- GParamSpec * pspec)
-{
- GstEsdmon *esdmon;
-
- g_return_if_fail (GST_IS_ESDMON (object));
- esdmon = GST_ESDMON (object);
-
- switch (prop_id) {
- case ARG_BYTESPERREAD:
- esdmon->bytes_per_read = g_value_get_ulong (value);
- /* No need to sync params - will just happen on next read. */
- break;
- case ARG_DEPTH:
- esdmon->depth = g_value_get_enum (value);
- gst_esdmon_sync_parms (esdmon);
- break;
- case ARG_CHANNELS:
- esdmon->channels = g_value_get_enum (value);
- gst_esdmon_sync_parms (esdmon);
- break;
- case ARG_RATE:
- /* Preserve the timestamps */
- esdmon->basetime =
- esdmon->samples_since_basetime * GST_SECOND / esdmon->frequency;
- esdmon->samples_since_basetime = 0;
-
- /* Set the new frequency */
- esdmon->frequency = g_value_get_int (value);
- gst_esdmon_sync_parms (esdmon);
- break;
- case ARG_HOST:
- if (esdmon->host != NULL)
- g_free (esdmon->host);
- if (g_value_get_string (value) == NULL)
- esdmon->host = NULL;
- else
- esdmon->host = g_strdup (g_value_get_string (value));
- break;
- default:
- break;
- }
-}
-
-static void
-gst_esdmon_get_property (GObject * object, guint prop_id, GValue * value,
- GParamSpec * pspec)
-{
- GstEsdmon *esdmon;
-
- g_return_if_fail (GST_IS_ESDMON (object));
- esdmon = GST_ESDMON (object);
-
- switch (prop_id) {
- case ARG_BYTESPERREAD:
- g_value_set_ulong (value, esdmon->bytes_per_read);
- break;
- case ARG_CUROFFSET:
- g_value_set_ulong (value, esdmon->curoffset);
- break;
- case ARG_DEPTH:
- g_value_set_enum (value, esdmon->depth);
- break;
- case ARG_CHANNELS:
- g_value_set_enum (value, esdmon->channels);
- break;
- case ARG_RATE:
- g_value_set_int (value, esdmon->frequency);
- break;
- case ARG_HOST:
- g_value_set_string (value, esdmon->host);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static gboolean
-gst_esdmon_open_audio (GstEsdmon * src)
-{
- /* Name used by esound for this connection. */
- const char connname[] = "GStreamer";
-
- /* Bitmap describing audio format. */
- esd_format_t esdformat = ESD_STREAM | ESD_PLAY;
-
- g_return_val_if_fail (src->fd == -1, FALSE);
-
- if (src->depth == 16)
- esdformat |= ESD_BITS16;
- else if (src->depth == 8)
- esdformat |= ESD_BITS8;
- else {
- GST_DEBUG ("esdmon: invalid bit depth (%d)", src->depth);
- return FALSE;
- }
-
- if (src->channels == 2)
- esdformat |= ESD_STEREO;
- else if (src->channels == 1)
- esdformat |= ESD_MONO;
- else {
- GST_DEBUG ("esdmon: invalid number of channels (%d)", src->channels);
- return FALSE;
- }
-
- GST_DEBUG ("esdmon: attempting to open connection to esound server");
- src->fd = esd_monitor_stream (esdformat, src->frequency, src->host, connname);
- if (src->fd < 0) {
- GST_DEBUG ("esdmon: can't open connection to esound server");
- return FALSE;
- }
-
- GST_OBJECT_FLAG_SET (src, GST_ESDMON_OPEN);
-
- return TRUE;
-}
-
-static void
-gst_esdmon_close_audio (GstEsdmon * src)
-{
- if (src->fd < 0)
- return;
-
- close (src->fd);
- src->fd = -1;
-
- GST_OBJECT_FLAG_UNSET (src, GST_ESDMON_OPEN);
-
- GST_DEBUG ("esdmon: closed sound device");
-}
-
-static GstStateChangeReturn
-gst_esdmon_change_state (GstElement * element, GstStateChange transition)
-{
- g_return_val_if_fail (GST_IS_ESDMON (element), FALSE);
-
- /* if going down into NULL state, close the fd if it's open */
- if (GST_STATE_PENDING (element) == GST_STATE_NULL) {
- if (GST_OBJECT_FLAG_IS_SET (element, GST_ESDMON_OPEN))
- gst_esdmon_close_audio (GST_ESDMON (element));
- /* otherwise (READY or higher) we need to open the fd */
- } else {
- if (!GST_OBJECT_FLAG_IS_SET (element, GST_ESDMON_OPEN)) {
- if (!gst_esdmon_open_audio (GST_ESDMON (element)))
- return GST_STATE_CHANGE_FAILURE;
- }
- }
-
- if (GST_ELEMENT_CLASS (parent_class)->change_state)
- return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
- return GST_STATE_CHANGE_SUCCESS;
-}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2001,2002> Richard Boulton <richard-gst@tartarus.org>
- *
- * Based on example.c:
- * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_ESDMON_H__
-#define __GST_ESDMON_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_ESDMON \
- (gst_esdmon_get_type())
-#define GST_ESDMON(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ESDMON,GstEsdmon))
-#define GST_ESDMON_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ESDMON,GstEsdmonClass))
-#define GST_IS_ESDMON(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ESDMON))
-#define GST_IS_ESDMON_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ESDMON))
-
-typedef enum {
- GST_ESDMON_OPEN = (GST_ELEMENT_FLAG_LAST << 0),
- GST_ESDMON_FLAG_LAST = (GST_ELEMENT_FLAG_LAST << 2)
-} GstEsdSrcFlags;
-
-typedef struct _GstEsdmon GstEsdmon;
-typedef struct _GstEsdmonClass GstEsdmonClass;
-
-struct _GstEsdmon {
- GstElement element;
-
- GstPad *srcpad;
-
- gchar* host;
-
- int fd;
-
- gint depth;
- gint channels;
- gint frequency;
-
- guint64 basetime;
- guint64 samples_since_basetime;
- guint64 curoffset;
- guint64 bytes_per_read;
-};
-
-struct _GstEsdmonClass {
- GstElementClass parent_class;
-};
-
-GType gst_esdmon_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_ESDMON_H__ */
-
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2005> Arwed v. Merkatz <v.merkatz@gmx.net>
- *
- * Roughly based on the gstreamer 0.8 esdsink plugin:
- * Copyright (C) <2001> Richard Boulton <richard-gst@tartarus.org>
- *
- * esdsink.c: an EsounD audio sink
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-/**
- * SECTION:element-esdsink
- * @see_also: #GstAlsaSink, #GstAutoAudioSink
- *
- * This element outputs sound to an already-running Enlightened Sound Daemon
- * (ESound Daemon, esd). Note that a sound daemon will never be auto-spawned
- * through this element (regardless of the system configuration), since this
- * is actively prevented by the element. If you must use esd, you need to
- * make sure it is started automatically with your session or otherwise.
- *
- * TODO: insert some comments about how sucky esd is and that all the cool
- * kids use pulseaudio or whatever these days.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch -v filesrc location=foo.ogg ! decodebin ! audioconvert ! audioresample ! esdsink
- * ]| play an Ogg/Vorbis audio file via esd
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include "esdsink.h"
-#include <esd.h>
-#include <unistd.h>
-#include <errno.h>
-
-#include <gst/gst-i18n-plugin.h>
-
-/* wtay: from my esd.h (debian unstable libesd0-dev 0.2.36-3) */
-#ifndef ESD_MAX_WRITE_SIZE
-#define ESD_MAX_WRITE_SIZE (21 * 4096)
-#endif
-
-GST_DEBUG_CATEGORY_EXTERN (esd_debug);
-#define GST_CAT_DEFAULT esd_debug
-
-enum
-{
- PROP_0,
- PROP_HOST
-};
-
-static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) BYTE_ORDER, "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 2 ]; "
- "audio/x-raw-int, "
- "signed = (boolean) { true, false }, "
- "width = (int) 8, "
- "depth = (int) 8, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]")
- );
-
-static void gst_esdsink_finalize (GObject * object);
-
-static GstCaps *gst_esdsink_getcaps (GstBaseSink * bsink);
-
-static gboolean gst_esdsink_open (GstAudioSink * asink);
-static gboolean gst_esdsink_close (GstAudioSink * asink);
-static gboolean gst_esdsink_prepare (GstAudioSink * asink,
- GstRingBufferSpec * spec);
-static gboolean gst_esdsink_unprepare (GstAudioSink * asink);
-static guint gst_esdsink_write (GstAudioSink * asink, gpointer data,
- guint length);
-static guint gst_esdsink_delay (GstAudioSink * asink);
-static void gst_esdsink_reset (GstAudioSink * asink);
-
-static void gst_esdsink_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_esdsink_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-GST_BOILERPLATE (GstEsdSink, gst_esdsink, GstAudioSink, GST_TYPE_AUDIO_SINK);
-
-static void
-gst_esdsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
- gst_element_class_set_details_simple (element_class, "Esound audio sink",
- "Sink/Audio",
- "Plays audio to an esound server",
- "Arwed von Merkatz <v.merkatz@gmx.net>");
-}
-
-static void
-gst_esdsink_class_init (GstEsdSinkClass * klass)
-{
- GObjectClass *gobject_class;
- GstBaseSinkClass *gstbasesink_class;
- GstAudioSinkClass *gstaudiosink_class;
-
- gobject_class = (GObjectClass *) klass;
- gstbasesink_class = (GstBaseSinkClass *) klass;
- gstaudiosink_class = (GstAudioSinkClass *) klass;
-
- parent_class = g_type_class_peek_parent (klass);
-
- gobject_class->finalize = gst_esdsink_finalize;
-
- gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_esdsink_getcaps);
-
- gstaudiosink_class->open = GST_DEBUG_FUNCPTR (gst_esdsink_open);
- gstaudiosink_class->close = GST_DEBUG_FUNCPTR (gst_esdsink_close);
- gstaudiosink_class->prepare = GST_DEBUG_FUNCPTR (gst_esdsink_prepare);
- gstaudiosink_class->unprepare = GST_DEBUG_FUNCPTR (gst_esdsink_unprepare);
- gstaudiosink_class->write = GST_DEBUG_FUNCPTR (gst_esdsink_write);
- gstaudiosink_class->delay = GST_DEBUG_FUNCPTR (gst_esdsink_delay);
- gstaudiosink_class->reset = GST_DEBUG_FUNCPTR (gst_esdsink_reset);
-
- gobject_class->set_property = gst_esdsink_set_property;
- gobject_class->get_property = gst_esdsink_get_property;
-
- /* default value is filled in the _init method */
- g_object_class_install_property (gobject_class, PROP_HOST,
- g_param_spec_string ("host", "Host",
- "The host running the esound daemon", NULL,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-}
-
-static void
-gst_esdsink_init (GstEsdSink * esdsink, GstEsdSinkClass * klass)
-{
- esdsink->fd = -1;
- esdsink->ctrl_fd = -1;
- esdsink->host = g_strdup (g_getenv ("ESPEAKER"));
-}
-
-static void
-gst_esdsink_finalize (GObject * object)
-{
- GstEsdSink *esdsink = GST_ESDSINK (object);
-
- gst_caps_replace (&esdsink->cur_caps, NULL);
- g_free (esdsink->host);
-
- G_OBJECT_CLASS (parent_class)->finalize (object);
-}
-
-static GstCaps *
-gst_esdsink_getcaps (GstBaseSink * bsink)
-{
- GstEsdSink *esdsink;
-
- esdsink = GST_ESDSINK (bsink);
-
- /* no fd, we're done with the template caps */
- if (esdsink->ctrl_fd < 0 || esdsink->cur_caps == NULL) {
- GST_LOG_OBJECT (esdsink, "getcaps called, returning template caps");
- return NULL;
- }
-
- GST_LOG_OBJECT (esdsink, "returning %" GST_PTR_FORMAT, esdsink->cur_caps);
-
- return gst_caps_ref (esdsink->cur_caps);
-}
-
-static gboolean
-gst_esdsink_open (GstAudioSink * asink)
-{
- esd_server_info_t *server_info;
- GstPadTemplate *pad_template;
- GstEsdSink *esdsink;
- gchar *saved_env;
- gint i;
-
- esdsink = GST_ESDSINK (asink);
-
- GST_DEBUG_OBJECT (esdsink, "open");
-
- /* ensure libesd doesn't auto-spawn a sound daemon if none is running yet */
- saved_env = g_strdup (g_getenv ("ESD_NO_SPAWN"));
- g_setenv ("ESD_NO_SPAWN", "1", TRUE);
-
- /* now try to connect to any existing/running sound daemons */
- esdsink->ctrl_fd = esd_open_sound (esdsink->host);
-
- /* and restore the previous state */
- if (saved_env != NULL) {
- g_setenv ("ESD_NO_SPAWN", saved_env, TRUE);
- } else {
- g_unsetenv ("ESD_NO_SPAWN");
- }
- g_free (saved_env);
-
- if (esdsink->ctrl_fd < 0)
- goto couldnt_connect;
-
- /* get server info */
- server_info = esd_get_server_info (esdsink->ctrl_fd);
- if (!server_info)
- goto no_server_info;
-
- GST_INFO_OBJECT (esdsink, "got server info rate: %i", server_info->rate);
-
- pad_template = gst_static_pad_template_get (&sink_factory);
- esdsink->cur_caps = gst_caps_copy (gst_pad_template_get_caps (pad_template));
- gst_object_unref (pad_template);
-
- for (i = 0; i < esdsink->cur_caps->structs->len; i++) {
- GstStructure *s;
-
- s = gst_caps_get_structure (esdsink->cur_caps, i);
- gst_structure_set (s, "rate", G_TYPE_INT, server_info->rate, NULL);
- }
-
- esd_free_server_info (server_info);
-
- GST_INFO_OBJECT (esdsink, "server caps: %" GST_PTR_FORMAT, esdsink->cur_caps);
-
- return TRUE;
-
- /* ERRORS */
-couldnt_connect:
- {
- GST_ELEMENT_ERROR (esdsink, RESOURCE, OPEN_WRITE,
- (_("Could not establish connection to sound server")),
- ("can't open connection to esound server"));
- return FALSE;
- }
-no_server_info:
- {
- GST_ELEMENT_ERROR (esdsink, RESOURCE, OPEN_WRITE,
- (_("Failed to query sound server capabilities")),
- ("couldn't get server info!"));
- return FALSE;
- }
-}
-
-static gboolean
-gst_esdsink_close (GstAudioSink * asink)
-{
- GstEsdSink *esdsink = GST_ESDSINK (asink);
-
- GST_DEBUG_OBJECT (esdsink, "close");
-
- gst_caps_replace (&esdsink->cur_caps, NULL);
- esd_close (esdsink->ctrl_fd);
- esdsink->ctrl_fd = -1;
-
- return TRUE;
-}
-
-static gboolean
-gst_esdsink_prepare (GstAudioSink * asink, GstRingBufferSpec * spec)
-{
- GstEsdSink *esdsink = GST_ESDSINK (asink);
- esd_format_t esdformat;
-
- /* Name used by esound for this connection. */
- const char connname[] = "GStreamer";
-
- GST_DEBUG_OBJECT (esdsink, "prepare");
-
- /* Bitmap describing audio format. */
- esdformat = ESD_STREAM | ESD_PLAY;
-
- switch (spec->depth) {
- case 8:
- esdformat |= ESD_BITS8;
- break;
- case 16:
- esdformat |= ESD_BITS16;
- break;
- default:
- goto unsupported_depth;
- }
-
- switch (spec->channels) {
- case 1:
- esdformat |= ESD_MONO;
- break;
- case 2:
- esdformat |= ESD_STEREO;
- break;
- default:
- goto unsupported_channels;
- }
-
- GST_INFO_OBJECT (esdsink,
- "attempting to open data connection to esound server");
-
- esdsink->fd =
- esd_play_stream (esdformat, spec->rate, esdsink->host, connname);
-
- if ((esdsink->fd < 0) || (esdsink->ctrl_fd < 0))
- goto cannot_open;
-
- esdsink->rate = spec->rate;
-
- spec->segsize = ESD_BUF_SIZE;
- spec->segtotal = (ESD_MAX_WRITE_SIZE / spec->segsize);
-
- /* FIXME: this is wrong for signed ints (and the
- * audioringbuffers should do it for us anyway) */
- spec->silence_sample[0] = 0;
- spec->silence_sample[1] = 0;
- spec->silence_sample[2] = 0;
- spec->silence_sample[3] = 0;
-
- GST_INFO_OBJECT (esdsink, "successfully opened connection to esound server");
-
- return TRUE;
-
- /* ERRORS */
-unsupported_depth:
- {
- GST_ELEMENT_ERROR (esdsink, STREAM, WRONG_TYPE, (NULL),
- ("can't handle sample depth of %d, only 8 or 16 supported",
- spec->depth));
- return FALSE;
- }
-unsupported_channels:
- {
- GST_ELEMENT_ERROR (esdsink, STREAM, WRONG_TYPE, (NULL),
- ("can't handle %d channels, only 1 or 2 supported", spec->channels));
- return FALSE;
- }
-cannot_open:
- {
- GST_ELEMENT_ERROR (esdsink, RESOURCE, OPEN_WRITE,
- (_("Could not establish connection to sound server")),
- ("can't open connection to esound server"));
- return FALSE;
- }
-}
-
-static gboolean
-gst_esdsink_unprepare (GstAudioSink * asink)
-{
- GstEsdSink *esdsink = GST_ESDSINK (asink);
-
- if ((esdsink->fd < 0) && (esdsink->ctrl_fd < 0))
- return TRUE;
-
- close (esdsink->fd);
- esdsink->fd = -1;
-
- GST_INFO_OBJECT (esdsink, "closed sound device");
-
- return TRUE;
-}
-
-
-static guint
-gst_esdsink_write (GstAudioSink * asink, gpointer data, guint length)
-{
- GstEsdSink *esdsink = GST_ESDSINK (asink);
- gint to_write = 0;
-
- to_write = length;
-
- while (to_write > 0) {
- int done;
-
- done = write (esdsink->fd, data, to_write);
-
- if (done < 0)
- goto write_error;
-
- to_write -= done;
- data = (char *) data + done;
- }
- return length;
-
- /* ERRORS */
-write_error:
- {
- GST_ELEMENT_ERROR (esdsink, RESOURCE, WRITE,
- ("Failed to write data to the esound daemon"), GST_ERROR_SYSTEM);
- return -1;
- }
-}
-
-static guint
-gst_esdsink_delay (GstAudioSink * asink)
-{
- GstEsdSink *esdsink = GST_ESDSINK (asink);
- guint latency;
-
- latency = esd_get_latency (esdsink->ctrl_fd);
-
- if (latency == (guint) - 1) {
- GST_WARNING_OBJECT (asink, "couldn't get latency");
- return 0;
- }
-
- /* latency is measured in samples at a rate of 44100, this
- * cannot overflow. */
- latency = latency * G_GINT64_CONSTANT (44100) / esdsink->rate;
-
- GST_DEBUG_OBJECT (asink, "got latency: %u", latency);
-
- return latency;
-}
-
-static void
-gst_esdsink_reset (GstAudioSink * asink)
-{
- GST_DEBUG_OBJECT (asink, "reset called");
-}
-
-static void
-gst_esdsink_set_property (GObject * object, guint prop_id, const GValue * value,
- GParamSpec * pspec)
-{
- GstEsdSink *esdsink = GST_ESDSINK (object);
-
- switch (prop_id) {
- case PROP_HOST:
- g_free (esdsink->host);
- esdsink->host = g_value_dup_string (value);
- break;
- default:
- break;
- }
-}
-
-static void
-gst_esdsink_get_property (GObject * object, guint prop_id, GValue * value,
- GParamSpec * pspec)
-{
- GstEsdSink *esdsink = GST_ESDSINK (object);
-
- switch (prop_id) {
- case PROP_HOST:
- g_value_set_string (value, esdsink->host);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2005> Arwed v. Merkatz <v.merkatz@gmx.net>
- *
- * esdsink.h: an EsounD audio sink
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-
-#ifndef __GST_ESDSINK_H__
-#define __GST_ESDSINK_H__
-
-#include <gst/gst.h>
-#include <gst/audio/gstaudiosink.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_ESDSINK \
- (gst_esdsink_get_type())
-#define GST_ESDSINK(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ESDSINK,GstEsdSink))
-#define GST_ESDSINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ESDSINK,GstEsdSinkClass))
-#define GST_IS_ESDSINK(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ESDSINK))
-#define GST_IS_ESDSINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ESDSINK))
-
-typedef struct _GstEsdSink GstEsdSink;
-typedef struct _GstEsdSinkClass GstEsdSinkClass;
-
-struct _GstEsdSink {
- GstAudioSink sink;
-
- int fd;
- int ctrl_fd;
- gchar *host;
-
- guint rate;
- GstCaps *cur_caps;
-};
-
-struct _GstEsdSinkClass {
- GstAudioSinkClass parent_class;
-};
-
-GType gst_esdsink_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_ESDSINK_H__ */
+++ /dev/null
-/* GStreamer
- * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
- * Copyright (C) <2003> David A. Schleef <ds@schleef.org>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-#include "esdsink.h"
-#if 0
-#include "esdmon.h"
-#endif
-
-#include "gst/gst-i18n-plugin.h"
-
-GST_DEBUG_CATEGORY (esd_debug);
-
-static gboolean
-plugin_init (GstPlugin * plugin)
-{
- if (!gst_element_register (plugin, "esdsink", GST_RANK_MARGINAL,
- GST_TYPE_ESDSINK))
- return FALSE;
-
-#if 0
- if (!gst_element_register (plugin, "esdmon", GST_RANK_NONE, GST_TYPE_ESDMON))
- return FALSE;
-#endif
-
- GST_DEBUG_CATEGORY_INIT (esd_debug, "esd", 0, "ESounD elements");
-
-#ifdef ENABLE_NLS
- setlocale (LC_ALL, "");
- bindtextdomain (GETTEXT_PACKAGE, LOCALEDIR);
- bind_textdomain_codeset (GETTEXT_PACKAGE, "UTF-8");
-#endif /* ENABLE_NLS */
-
- return TRUE;
-}
-
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
- GST_VERSION_MINOR,
- "esdsink",
- "ESD Element Plugins",
- plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
- * Copyright (C) <2006> Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) <2006,2011> Tim-Philipp Müller <tim centricular net>
* Copyright (C) <2006> Jan Schmidt <thaytan at mad scientist com>
*
* This library is free software; you can redistribute it and/or
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch filesrc location=media/small/dark.441-16-s.flac ! flacdec ! audioconvert ! audioresample ! autoaudiosink
+ * gst-launch-0.11 filesrc location=media/small/dark.441-16-s.flac ! flacparse ! flacdec ! audioconvert ! audioresample ! autoaudiosink
* ]|
* |[
- * gst-launch gnomevfssrc location=http://gstreamer.freedesktop.org/media/small/dark.441-16-s.flac ! flacdec ! audioconvert ! audioresample ! queue min-threshold-buffers=10 ! autoaudiosink
+ * gst-launch-0.11 souphttpsrc location=http://gstreamer.freedesktop.org/media/small/dark.441-16-s.flac ! flacparse ! flacdec ! audioconvert ! audioresample ! queue min-threshold-buffers=10 ! autoaudiosink
* ]|
* </refsect2>
*/
-/* TODO: add seeking when operating chain-based with unframed input */
-/* FIXME: demote/remove granulepos handling and make more time-centric */
-
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <string.h>
#include "gstflacdec.h"
#include <gst/gst-i18n-plugin.h>
-#include <gst/gsttagsetter.h>
-#include <gst/base/gsttypefindhelper.h>
-#include <gst/audio/multichannel.h>
#include <gst/tag/tag.h>
/* Taken from http://flac.sourceforge.net/format.html#frame_header */
static const GstAudioChannelPosition channel_positions[8][8] = {
- {GST_AUDIO_CHANNEL_POSITION_FRONT_MONO},
+ {GST_AUDIO_CHANNEL_POSITION_MONO},
{GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}, {
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT},
/* FIXME: 7/8 channel layouts are not defined in the FLAC specs */
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_REAR_CENTER}, {
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}
};
GST_DEBUG_CATEGORY_STATIC (flacdec_debug);
#define GST_CAT_DEFAULT flacdec_debug
-static void gst_flac_dec_finalize (GObject * object);
-static void gst_flac_dec_loop (GstPad * pad);
-
-static GstStateChangeReturn gst_flac_dec_change_state (GstElement * element,
- GstStateChange transition);
-static const GstQueryType *gst_flac_dec_get_src_query_types (GstPad * pad);
-static const GstQueryType *gst_flac_dec_get_sink_query_types (GstPad * pad);
-static gboolean gst_flac_dec_sink_query (GstPad * pad, GstQuery * query);
-static gboolean gst_flac_dec_src_query (GstPad * pad, GstQuery * query);
-static gboolean gst_flac_dec_convert_src (GstPad * pad, GstFormat src_format,
- gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
-static gboolean gst_flac_dec_src_event (GstPad * pad, GstEvent * event);
-static gboolean gst_flac_dec_sink_activate (GstPad * sinkpad);
-static gboolean gst_flac_dec_sink_activate_pull (GstPad * sinkpad,
- gboolean active);
-static gboolean gst_flac_dec_sink_activate_push (GstPad * sinkpad,
- gboolean active);
-static gboolean gst_flac_dec_sink_event (GstPad * pad, GstEvent * event);
-static GstFlowReturn gst_flac_dec_chain (GstPad * pad, GstBuffer * buf);
-
-static void gst_flac_dec_reset_decoders (GstFlacDec * flacdec);
-static void gst_flac_dec_setup_decoder (GstFlacDec * flacdec);
-
-static FLAC__StreamDecoderReadStatus
-gst_flac_dec_read_seekable (const FLAC__StreamDecoder * decoder,
- FLAC__byte buffer[], size_t * bytes, void *client_data);
static FLAC__StreamDecoderReadStatus
gst_flac_dec_read_stream (const FLAC__StreamDecoder * decoder,
FLAC__byte buffer[], size_t * bytes, void *client_data);
-static FLAC__StreamDecoderSeekStatus
-gst_flac_dec_seek (const FLAC__StreamDecoder * decoder,
- FLAC__uint64 position, void *client_data);
-static FLAC__StreamDecoderTellStatus
-gst_flac_dec_tell (const FLAC__StreamDecoder * decoder,
- FLAC__uint64 * position, void *client_data);
-static FLAC__StreamDecoderLengthStatus
-gst_flac_dec_length (const FLAC__StreamDecoder * decoder,
- FLAC__uint64 * length, void *client_data);
-static FLAC__bool gst_flac_dec_eof (const FLAC__StreamDecoder * decoder,
- void *client_data);
static FLAC__StreamDecoderWriteStatus
gst_flac_dec_write_stream (const FLAC__StreamDecoder * decoder,
const FLAC__Frame * frame,
static void gst_flac_dec_error_cb (const FLAC__StreamDecoder *
decoder, FLAC__StreamDecoderErrorStatus status, void *client_data);
-GST_BOILERPLATE (GstFlacDec, gst_flac_dec, GstElement, GST_TYPE_ELEMENT);
+static void gst_flac_dec_flush (GstAudioDecoder * audio_dec, gboolean hard);
+static gboolean gst_flac_dec_set_format (GstAudioDecoder * dec, GstCaps * caps);
+static gboolean gst_flac_dec_start (GstAudioDecoder * dec);
+static gboolean gst_flac_dec_stop (GstAudioDecoder * dec);
+static GstFlowReturn gst_flac_dec_handle_frame (GstAudioDecoder * audio_dec,
+ GstBuffer * buf);
+
+G_DEFINE_TYPE (GstFlacDec, gst_flac_dec, GST_TYPE_AUDIO_DECODER);
+
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define FORMATS "{ S8LE, S16LE, S32LE } "
+#else
+#define FORMATS "{ S8BE, S16BE, S32BE } "
+#endif
-/* FIXME 0.11: Use width=32 for all depths and let audioconvert
- * handle the conversions instead of doing it ourself.
- */
#define GST_FLAC_DEC_SRC_CAPS \
- "audio/x-raw-int, " \
- "endianness = (int) BYTE_ORDER, " \
- "signed = (boolean) true, " \
- "width = (int) { 8, 16, 32 }, " \
- "depth = (int) [ 4, 32 ], " \
+ "audio/x-raw, " \
+ "format = (string) " FORMATS ", " \
+ "layout = (string) interleaved, " \
+ "rate = (int) [ 1, 655350 ], " \
+ "channels = (int) [ 1, 8 ]"
+
+#define GST_FLAC_DEC_SINK_CAPS \
+ "audio/x-flac, " \
+ "framed = (boolean) true, " \
"rate = (int) [ 1, 655350 ], " \
"channels = (int) [ 1, 8 ]"
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-flac")
- );
-
-static void
-gst_flac_dec_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &flac_dec_src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &flac_dec_sink_factory);
- gst_element_class_set_details_simple (element_class, "FLAC audio decoder",
- "Codec/Decoder/Audio",
- "Decodes FLAC lossless audio streams", "Wim Taymans <wim@fluendo.com>");
-
- GST_DEBUG_CATEGORY_INIT (flacdec_debug, "flacdec", 0, "flac decoder");
-}
+ GST_STATIC_CAPS (GST_FLAC_DEC_SINK_CAPS));
static void
gst_flac_dec_class_init (GstFlacDecClass * klass)
{
+ GstAudioDecoderClass *audiodecoder_class;
GstElementClass *gstelement_class;
- GObjectClass *gobject_class;
+ audiodecoder_class = (GstAudioDecoderClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gobject_class = (GObjectClass *) klass;
- gobject_class->finalize = gst_flac_dec_finalize;
+ GST_DEBUG_CATEGORY_INIT (flacdec_debug, "flacdec", 0, "flac decoder");
+
+ audiodecoder_class->stop = GST_DEBUG_FUNCPTR (gst_flac_dec_stop);
+ audiodecoder_class->start = GST_DEBUG_FUNCPTR (gst_flac_dec_start);
+ audiodecoder_class->flush = GST_DEBUG_FUNCPTR (gst_flac_dec_flush);
+ audiodecoder_class->set_format = GST_DEBUG_FUNCPTR (gst_flac_dec_set_format);
+ audiodecoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_flac_dec_handle_frame);
- gstelement_class->change_state =
- GST_DEBUG_FUNCPTR (gst_flac_dec_change_state);
-}
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_dec_src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_dec_sink_factory));
-static void
-gst_flac_dec_init (GstFlacDec * flacdec, GstFlacDecClass * klass)
-{
- flacdec->sinkpad =
- gst_pad_new_from_static_template (&flac_dec_sink_factory, "sink");
- gst_pad_set_activate_function (flacdec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_flac_dec_sink_activate));
- gst_pad_set_activatepull_function (flacdec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_flac_dec_sink_activate_pull));
- gst_pad_set_activatepush_function (flacdec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_flac_dec_sink_activate_push));
- gst_pad_set_query_type_function (flacdec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_flac_dec_get_sink_query_types));
- gst_pad_set_query_function (flacdec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_flac_dec_sink_query));
- gst_pad_set_event_function (flacdec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_flac_dec_sink_event));
- gst_pad_set_chain_function (flacdec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_flac_dec_chain));
- gst_element_add_pad (GST_ELEMENT (flacdec), flacdec->sinkpad);
-
- flacdec->srcpad =
- gst_pad_new_from_static_template (&flac_dec_src_factory, "src");
- gst_pad_set_query_type_function (flacdec->srcpad,
- GST_DEBUG_FUNCPTR (gst_flac_dec_get_src_query_types));
- gst_pad_set_query_function (flacdec->srcpad,
- GST_DEBUG_FUNCPTR (gst_flac_dec_src_query));
- gst_pad_set_event_function (flacdec->srcpad,
- GST_DEBUG_FUNCPTR (gst_flac_dec_src_event));
- gst_pad_use_fixed_caps (flacdec->srcpad);
- gst_element_add_pad (GST_ELEMENT (flacdec), flacdec->srcpad);
-
- gst_flac_dec_reset_decoders (flacdec);
+ gst_element_class_set_details_simple (gstelement_class, "FLAC audio decoder",
+ "Codec/Decoder/Audio", "Decodes FLAC lossless audio streams",
+ "Tim-Philipp Müller <tim@centricular.net>, "
+ "Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_flac_dec_reset_decoders (GstFlacDec * flacdec)
+gst_flac_dec_init (GstFlacDec * flacdec)
{
- /* Clean up the decoder */
- if (flacdec->decoder) {
- FLAC__stream_decoder_delete (flacdec->decoder);
- flacdec->decoder = NULL;
- }
-
- if (flacdec->adapter) {
- gst_adapter_clear (flacdec->adapter);
- g_object_unref (flacdec->adapter);
- flacdec->adapter = NULL;
- }
-
- if (flacdec->close_segment) {
- gst_event_unref (flacdec->close_segment);
- flacdec->close_segment = NULL;
- }
- if (flacdec->start_segment) {
- gst_event_unref (flacdec->start_segment);
- flacdec->start_segment = NULL;
- }
- if (flacdec->tags) {
- gst_tag_list_free (flacdec->tags);
- flacdec->tags = NULL;
- }
- if (flacdec->pending) {
- gst_buffer_unref (flacdec->pending);
- flacdec->pending = NULL;
- }
-
- flacdec->segment.last_stop = 0;
- flacdec->offset = 0;
- flacdec->init = TRUE;
+ /* nothing to do here */
}
-static void
-gst_flac_dec_setup_decoder (GstFlacDec * dec)
+static gboolean
+gst_flac_dec_start (GstAudioDecoder * audio_dec)
{
- gst_flac_dec_reset_decoders (dec);
+ FLAC__StreamDecoderInitStatus s;
+ GstFlacDec *dec;
- dec->tags = gst_tag_list_new ();
- gst_tag_list_add (dec->tags, GST_TAG_MERGE_REPLACE,
- GST_TAG_AUDIO_CODEC, "FLAC", NULL);
+ dec = GST_FLAC_DEC (audio_dec);
dec->adapter = gst_adapter_new ();
dec->decoder = FLAC__stream_decoder_new ();
- /* no point calculating since it's never checked here */
+ gst_audio_info_init (&dec->info);
+ dec->depth = 0;
+
+ /* no point calculating MD5 since it's never checked here */
FLAC__stream_decoder_set_md5_checking (dec->decoder, false);
- FLAC__stream_decoder_set_metadata_respond (dec->decoder,
- FLAC__METADATA_TYPE_VORBIS_COMMENT);
- FLAC__stream_decoder_set_metadata_respond (dec->decoder,
- FLAC__METADATA_TYPE_PICTURE);
+
+ GST_DEBUG_OBJECT (dec, "initializing decoder");
+ s = FLAC__stream_decoder_init_stream (dec->decoder,
+ gst_flac_dec_read_stream, NULL, NULL, NULL, NULL,
+ gst_flac_dec_write_stream, gst_flac_dec_metadata_cb,
+ gst_flac_dec_error_cb, dec);
+
+ if (s != FLAC__STREAM_DECODER_INIT_STATUS_OK) {
+ GST_ELEMENT_ERROR (GST_ELEMENT (dec), LIBRARY, INIT, (NULL), (NULL));
+ return FALSE;
+ }
+
+ dec->got_headers = FALSE;
+
+ return TRUE;
}
-static void
-gst_flac_dec_finalize (GObject * object)
+static gboolean
+gst_flac_dec_stop (GstAudioDecoder * dec)
{
- GstFlacDec *flacdec;
+ GstFlacDec *flacdec = GST_FLAC_DEC (dec);
- flacdec = GST_FLAC_DEC (object);
+ if (flacdec->decoder) {
+ FLAC__stream_decoder_delete (flacdec->decoder);
+ flacdec->decoder = NULL;
+ }
- gst_flac_dec_reset_decoders (flacdec);
+ if (flacdec->adapter) {
+ gst_adapter_clear (flacdec->adapter);
+ g_object_unref (flacdec->adapter);
+ flacdec->adapter = NULL;
+ }
- G_OBJECT_CLASS (parent_class)->finalize (object);
+ return TRUE;
}
-
static gboolean
-gst_flac_dec_update_metadata (GstFlacDec * flacdec,
- const FLAC__StreamMetadata * metadata)
+gst_flac_dec_set_format (GstAudioDecoder * dec, GstCaps * caps)
{
- GstTagList *list;
- guint num, i;
+ const GValue *headers;
+ GstFlacDec *flacdec;
+ GstStructure *s;
+ guint i, num;
- if (flacdec->tags)
- list = flacdec->tags;
- else
- flacdec->tags = list = gst_tag_list_new ();
+ flacdec = GST_FLAC_DEC (dec);
- num = metadata->data.vorbis_comment.num_comments;
- GST_DEBUG_OBJECT (flacdec, "%u tag(s) found", num);
+ GST_LOG_OBJECT (dec, "sink caps: %" GST_PTR_FORMAT, caps);
- for (i = 0; i < num; ++i) {
- gchar *vc, *name, *value;
+ s = gst_caps_get_structure (caps, 0);
+ headers = gst_structure_get_value (s, "streamheader");
+ if (headers == NULL || !GST_VALUE_HOLDS_ARRAY (headers)) {
+ GST_WARNING_OBJECT (dec, "no 'streamheader' field in input caps, try "
+ "adding a flacparse element upstream");
+ return FALSE;
+ }
+
+ if (gst_adapter_available (flacdec->adapter) > 0) {
+ GST_WARNING_OBJECT (dec, "unexpected data left in adapter");
+ gst_adapter_clear (flacdec->adapter);
+ }
- vc = g_strndup ((gchar *) metadata->data.vorbis_comment.comments[i].entry,
- metadata->data.vorbis_comment.comments[i].length);
+ num = gst_value_array_get_size (headers);
+ for (i = 0; i < num; ++i) {
+ const GValue *header_val;
+ GstBuffer *header_buf;
- if (gst_tag_parse_extended_comment (vc, &name, NULL, &value, TRUE)) {
- GST_DEBUG_OBJECT (flacdec, "%s : %s", name, value);
- if (value && strlen (value))
- gst_vorbis_tag_add (list, name, value);
- g_free (name);
- g_free (value);
- }
+ header_val = gst_value_array_get_value (headers, i);
+ if (header_val == NULL || !GST_VALUE_HOLDS_BUFFER (header_val))
+ return FALSE;
- g_free (vc);
+ header_buf = g_value_dup_boxed (header_val);
+ GST_INFO_OBJECT (dec, "pushing header buffer of %" G_GSIZE_FORMAT " bytes "
+ "into adapter", gst_buffer_get_size (header_buf));
+ gst_adapter_push (flacdec->adapter, header_buf);
}
+ GST_DEBUG_OBJECT (dec, "Processing headers and metadata");
+ if (!FLAC__stream_decoder_process_until_end_of_metadata (flacdec->decoder)) {
+ GST_WARNING_OBJECT (dec, "process_until_end_of_metadata failed");
+ }
+ GST_INFO_OBJECT (dec, "headers and metadata are now processed");
return TRUE;
}
return crc;
}
+/* FIXME: for our purposes it's probably enough to just check for the sync
+ * marker - we just want to know if it's a header frame or not */
static gboolean
gst_flac_dec_scan_got_frame (GstFlacDec * flacdec, guint8 * data, guint size,
gint64 * last_sample_num)
GST_DEBUG_OBJECT (flacdec, "frame number: %" G_GINT64_FORMAT,
*last_sample_num);
- if (flacdec->sample_rate > 0 && *last_sample_num != 0) {
+ if (flacdec->info.rate > 0 && *last_sample_num != 0) {
GST_DEBUG_OBJECT (flacdec, "last sample %" G_GINT64_FORMAT " = %"
GST_TIME_FORMAT, *last_sample_num,
- GST_TIME_ARGS (*last_sample_num * GST_SECOND / flacdec->sample_rate));
+ GST_TIME_ARGS (*last_sample_num * GST_SECOND / flacdec->info.rate));
}
return TRUE;
}
-#define SCANBLOCK_SIZE (64*1024)
-
-static void
-gst_flac_dec_scan_for_last_block (GstFlacDec * flacdec, gint64 * samples)
-{
- GstFormat format = GST_FORMAT_BYTES;
- gint64 file_size, offset;
-
- GST_INFO_OBJECT (flacdec, "total number of samples unknown, scanning file");
-
- if (!gst_pad_query_peer_duration (flacdec->sinkpad, &format, &file_size)) {
- GST_WARNING_OBJECT (flacdec, "failed to query upstream size!");
- return;
- }
-
- if (flacdec->min_blocksize != flacdec->max_blocksize) {
- GST_WARNING_OBJECT (flacdec, "scanning for last sample only works "
- "for FLAC files with constant blocksize");
- return;
- }
-
- GST_DEBUG_OBJECT (flacdec, "upstream size: %" G_GINT64_FORMAT, file_size);
-
- offset = file_size - 1;
- while (offset >= MAX (SCANBLOCK_SIZE / 2, file_size / 2)) {
- GstFlowReturn flow;
- GstBuffer *buf = NULL;
- guint8 *data;
- guint size;
-
- /* divide by 2 = not very sophisticated way to deal with overlapping */
- offset -= SCANBLOCK_SIZE / 2;
- GST_LOG_OBJECT (flacdec, "looking for frame at %" G_GINT64_FORMAT
- "-%" G_GINT64_FORMAT, offset, offset + SCANBLOCK_SIZE);
-
- flow = gst_pad_pull_range (flacdec->sinkpad, offset, SCANBLOCK_SIZE, &buf);
- if (flow != GST_FLOW_OK) {
- GST_DEBUG_OBJECT (flacdec, "flow = %s", gst_flow_get_name (flow));
- return;
- }
-
- size = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
-
- while (size > 16) {
- if (gst_flac_dec_scan_got_frame (flacdec, data, size, samples)) {
- GST_DEBUG_OBJECT (flacdec, "frame sync at offset %" G_GINT64_FORMAT,
- offset + GST_BUFFER_SIZE (buf) - size);
- gst_buffer_unref (buf);
- return;
- }
- ++data;
- --size;
- }
-
- gst_buffer_unref (buf);
- }
-}
-
-static void
-gst_flac_extract_picture_buffer (GstFlacDec * dec,
- const FLAC__StreamMetadata * metadata)
-{
- FLAC__StreamMetadata_Picture picture;
- GstTagList *tags;
-
- g_return_if_fail (metadata->type == FLAC__METADATA_TYPE_PICTURE);
-
- GST_LOG_OBJECT (dec, "Got PICTURE block");
- picture = metadata->data.picture;
-
- GST_DEBUG_OBJECT (dec, "declared MIME type is: '%s'",
- GST_STR_NULL (picture.mime_type));
- GST_DEBUG_OBJECT (dec, "image data is %u bytes", picture.data_length);
-
- tags = gst_tag_list_new ();
-
- gst_tag_list_add_id3_image (tags, (guint8 *) picture.data,
- picture.data_length, picture.type);
-
- if (!gst_tag_list_is_empty (tags)) {
- gst_element_found_tags_for_pad (GST_ELEMENT (dec), dec->srcpad, tags);
- } else {
- GST_DEBUG_OBJECT (dec, "problem parsing PICTURE block, skipping");
- gst_tag_list_free (tags);
- }
-}
-
static void
gst_flac_dec_metadata_cb (const FLAC__StreamDecoder * decoder,
const FLAC__StreamMetadata * metadata, void *client_data)
switch (metadata->type) {
case FLAC__METADATA_TYPE_STREAMINFO:{
gint64 samples;
- guint depth;
+ guint depth, width;
samples = metadata->data.stream_info.total_samples;
flacdec->min_blocksize = metadata->data.stream_info.min_blocksize;
flacdec->max_blocksize = metadata->data.stream_info.max_blocksize;
- flacdec->sample_rate = metadata->data.stream_info.sample_rate;
flacdec->depth = depth = metadata->data.stream_info.bits_per_sample;
- flacdec->channels = metadata->data.stream_info.channels;
if (depth < 9)
- flacdec->width = 8;
+ width = 8;
else if (depth < 17)
- flacdec->width = 16;
+ width = 16;
else
- flacdec->width = 32;
+ width = 32;
+
+ gst_audio_info_set_format (&flacdec->info,
+ gst_audio_format_build_integer (TRUE, G_BYTE_ORDER, width, width),
+ metadata->data.stream_info.sample_rate,
+ metadata->data.stream_info.channels, NULL);
+
+ memcpy (flacdec->info.position,
+ channel_positions[flacdec->info.channels - 1],
+ sizeof (GstAudioChannelPosition) * flacdec->info.channels);
+ gst_audio_channel_positions_to_valid_order (flacdec->info.position,
+ flacdec->info.channels);
+ /* Note: we create the inverse reordering map here */
+ gst_audio_get_channel_reorder_map (flacdec->info.channels,
+ flacdec->info.position, channel_positions[flacdec->info.channels - 1],
+ flacdec->channel_reorder_map);
GST_DEBUG_OBJECT (flacdec, "blocksize: min=%u, max=%u",
flacdec->min_blocksize, flacdec->max_blocksize);
GST_DEBUG_OBJECT (flacdec, "sample rate: %u, channels: %u",
- flacdec->sample_rate, flacdec->channels);
+ flacdec->info.rate, flacdec->info.channels);
GST_DEBUG_OBJECT (flacdec, "depth: %u, width: %u", flacdec->depth,
- flacdec->width);
-
- /* Only scan for last block in pull-mode, since it uses pull_range() */
- if (samples == 0 && !flacdec->streaming) {
- gst_flac_dec_scan_for_last_block (flacdec, &samples);
- }
+ flacdec->info.finfo->width);
GST_DEBUG_OBJECT (flacdec, "total samples = %" G_GINT64_FORMAT, samples);
-
- /* in framed mode the demuxer/parser upstream has already pushed a
- * newsegment event in TIME format which we've passed on */
- if (samples > 0 && !flacdec->framed) {
- gint64 duration;
-
- gst_segment_set_duration (&flacdec->segment, GST_FORMAT_DEFAULT,
- samples);
-
- /* convert duration to time */
- duration = gst_util_uint64_scale_int (samples, GST_SECOND,
- flacdec->sample_rate);
-
- /* fixme, at this time we could seek to the queued seek event if we have
- * any */
- if (flacdec->start_segment)
- gst_event_unref (flacdec->start_segment);
- flacdec->start_segment =
- gst_event_new_new_segment_full (FALSE,
- flacdec->segment.rate, flacdec->segment.applied_rate,
- GST_FORMAT_TIME, 0, duration, 0);
- }
- break;
- }
- case FLAC__METADATA_TYPE_PICTURE:{
- gst_flac_extract_picture_buffer (flacdec, metadata);
break;
}
- case FLAC__METADATA_TYPE_VORBIS_COMMENT:
- gst_flac_dec_update_metadata (flacdec, metadata);
- break;
default:
break;
}
dec->last_flow = GST_FLOW_ERROR;
}
-static FLAC__StreamDecoderSeekStatus
-gst_flac_dec_seek (const FLAC__StreamDecoder * decoder,
- FLAC__uint64 position, void *client_data)
-{
- GstFlacDec *flacdec;
-
- flacdec = GST_FLAC_DEC (client_data);
-
- GST_DEBUG_OBJECT (flacdec, "seek %" G_GUINT64_FORMAT, (guint64) position);
- flacdec->offset = position;
-
- return FLAC__STREAM_DECODER_SEEK_STATUS_OK;
-}
-
-static FLAC__StreamDecoderTellStatus
-gst_flac_dec_tell (const FLAC__StreamDecoder * decoder,
- FLAC__uint64 * position, void *client_data)
-{
- GstFlacDec *flacdec;
-
- flacdec = GST_FLAC_DEC (client_data);
-
- *position = flacdec->offset;
-
- GST_DEBUG_OBJECT (flacdec, "tell %" G_GINT64_FORMAT, (gint64) * position);
-
- return FLAC__STREAM_DECODER_TELL_STATUS_OK;
-}
-
-static FLAC__StreamDecoderLengthStatus
-gst_flac_dec_length (const FLAC__StreamDecoder * decoder,
- FLAC__uint64 * length, void *client_data)
-{
- GstFlacDec *flacdec;
- GstFormat fmt = GST_FORMAT_BYTES;
- gint64 len = -1;
-
- flacdec = GST_FLAC_DEC (client_data);
-
- if (!gst_pad_query_peer_duration (flacdec->sinkpad, &fmt, &len) ||
- (fmt != GST_FORMAT_BYTES || len == -1))
- return FLAC__STREAM_DECODER_LENGTH_STATUS_ERROR;
-
- *length = len;
-
- GST_DEBUG_OBJECT (flacdec, "encoded byte length %" G_GINT64_FORMAT,
- (gint64) * length);
-
- return FLAC__STREAM_DECODER_LENGTH_STATUS_OK;
-}
-
-static FLAC__bool
-gst_flac_dec_eof (const FLAC__StreamDecoder * decoder, void *client_data)
-{
- GstFlacDec *flacdec;
- GstFormat fmt;
- GstPad *peer;
- gboolean ret = FALSE;
- gint64 len;
-
- flacdec = GST_FLAC_DEC (client_data);
-
- if (!(peer = gst_pad_get_peer (flacdec->sinkpad))) {
- GST_WARNING_OBJECT (flacdec, "no peer pad, returning EOF");
- return TRUE;
- }
-
- fmt = GST_FORMAT_BYTES;
- if (gst_pad_query_duration (peer, &fmt, &len) && fmt == GST_FORMAT_BYTES &&
- len != -1 && flacdec->offset >= len) {
- GST_DEBUG_OBJECT (flacdec,
- "offset=%" G_GINT64_FORMAT ", len=%" G_GINT64_FORMAT
- ", returning EOF", flacdec->offset, len);
- ret = TRUE;
- }
-
- gst_object_unref (peer);
-
- return ret;
-}
-
-static FLAC__StreamDecoderReadStatus
-gst_flac_dec_read_seekable (const FLAC__StreamDecoder * decoder,
- FLAC__byte buffer[], size_t * bytes, void *client_data)
-{
- GstFlowReturn flow;
- GstFlacDec *flacdec;
- GstBuffer *buf;
-
- flacdec = GST_FLAC_DEC (client_data);
-
- flow = gst_pad_pull_range (flacdec->sinkpad, flacdec->offset, *bytes, &buf);
-
- GST_PAD_STREAM_LOCK (flacdec->sinkpad);
- flacdec->pull_flow = flow;
- GST_PAD_STREAM_UNLOCK (flacdec->sinkpad);
-
- if (G_UNLIKELY (flow != GST_FLOW_OK)) {
- GST_INFO_OBJECT (flacdec, "pull_range flow: %s", gst_flow_get_name (flow));
- if (flow == GST_FLOW_UNEXPECTED)
- return FLAC__STREAM_DECODER_READ_STATUS_END_OF_STREAM;
- else
- return FLAC__STREAM_DECODER_READ_STATUS_ABORT;
- }
-
- GST_DEBUG_OBJECT (flacdec, "Read %d bytes at %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (buf), flacdec->offset);
- memcpy (buffer, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
- *bytes = GST_BUFFER_SIZE (buf);
- gst_buffer_unref (buf);
- flacdec->offset += *bytes;
-
- return FLAC__STREAM_DECODER_READ_STATUS_CONTINUE;
-}
-
static FLAC__StreamDecoderReadStatus
gst_flac_dec_read_stream (const FLAC__StreamDecoder * decoder,
FLAC__byte buffer[], size_t * bytes, void *client_data)
return FLAC__STREAM_DECODER_READ_STATUS_ABORT;
}
- GST_LOG_OBJECT (dec, "feeding %u bytes to decoder (available=%u, bytes=%u)",
+ GST_LOG_OBJECT (dec, "feeding %u bytes to decoder "
+ "(available=%" G_GSIZE_FORMAT ", bytes=%u)",
len, gst_adapter_available (dec->adapter), (guint) * bytes);
gst_adapter_copy (dec->adapter, buffer, 0, len);
*bytes = len;
guint channels = frame->header.channels;
guint samples = frame->header.blocksize;
guint j, i;
- GstClockTime next;
+ GstMapInfo map;
+ gboolean caps_changed;
GST_LOG_OBJECT (flacdec, "samples in frame header: %d", samples);
- /* if a DEFAULT segment is configured, don't send samples past the end
- * of the segment */
- if (flacdec->segment.format == GST_FORMAT_DEFAULT &&
- flacdec->segment.stop != -1 &&
- flacdec->segment.last_stop >= 0 &&
- flacdec->segment.last_stop + samples > flacdec->segment.stop) {
- samples = flacdec->segment.stop - flacdec->segment.last_stop;
- GST_DEBUG_OBJECT (flacdec,
- "clipping last buffer to %d samples because of segment", samples);
+ if (depth == 0) {
+ if (flacdec->depth < 4 || flacdec->depth > 32) {
+ GST_ERROR_OBJECT (flacdec, "unsupported depth %d from STREAMINFO",
+ flacdec->depth);
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+
+ depth = flacdec->depth;
}
switch (depth) {
case 32:
width = 32;
break;
- case 0:
- if (flacdec->depth < 4 || flacdec->depth > 32) {
- GST_ERROR_OBJECT (flacdec, "unsupported depth %d from STREAMINFO",
- flacdec->depth);
- ret = GST_FLOW_ERROR;
- goto done;
- }
-
- depth = flacdec->depth;
- if (depth < 9)
- width = 8;
- else if (depth < 17)
- width = 16;
- else
- width = 32;
-
- break;
default:
GST_ERROR_OBJECT (flacdec, "unsupported depth %d", depth);
ret = GST_FLOW_ERROR;
}
if (sample_rate == 0) {
- if (flacdec->sample_rate != 0) {
- sample_rate = flacdec->sample_rate;
+ if (flacdec->info.rate != 0) {
+ sample_rate = flacdec->info.rate;
} else {
GST_ERROR_OBJECT (flacdec, "unknown sample rate");
ret = GST_FLOW_ERROR;
}
}
- if (!GST_PAD_CAPS (flacdec->srcpad)) {
- GstCaps *caps;
-
- GST_DEBUG_OBJECT (flacdec, "Negotiating %d Hz @ %d channels",
- frame->header.sample_rate, channels);
+ caps_changed = (sample_rate != flacdec->info.rate)
+ || (width != flacdec->info.finfo->width)
+ || (channels != flacdec->info.channels);
- caps = gst_caps_new_simple ("audio/x-raw-int",
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, TRUE,
- "width", G_TYPE_INT, width,
- "depth", G_TYPE_INT, depth,
- "rate", G_TYPE_INT, frame->header.sample_rate,
- "channels", G_TYPE_INT, channels, NULL);
+ if (caps_changed
+ || !gst_pad_has_current_caps (GST_AUDIO_DECODER_SRC_PAD (flacdec))) {
+ GST_DEBUG_OBJECT (flacdec, "Negotiating %d Hz @ %d channels", sample_rate,
+ channels);
- if (channels > 2) {
- GstStructure *s = gst_caps_get_structure (caps, 0);
+ gst_audio_info_set_format (&flacdec->info,
+ gst_audio_format_build_integer (TRUE, G_BYTE_ORDER, width, width),
+ sample_rate, channels, NULL);
- gst_audio_set_channel_positions (s, channel_positions[channels - 1]);
- }
+ memcpy (flacdec->info.position,
+ channel_positions[flacdec->info.channels - 1],
+ sizeof (GstAudioChannelPosition) * flacdec->info.channels);
+ gst_audio_channel_positions_to_valid_order (flacdec->info.position,
+ flacdec->info.channels);
+ /* Note: we create the inverse reordering map here */
+ gst_audio_get_channel_reorder_map (flacdec->info.channels,
+ flacdec->info.position, channel_positions[flacdec->info.channels - 1],
+ flacdec->channel_reorder_map);
flacdec->depth = depth;
- flacdec->width = width;
- flacdec->channels = channels;
- flacdec->sample_rate = sample_rate;
-
- gst_pad_set_caps (flacdec->srcpad, caps);
- gst_caps_unref (caps);
- }
-
- if (flacdec->close_segment) {
- GST_DEBUG_OBJECT (flacdec, "pushing close segment");
- gst_pad_push_event (flacdec->srcpad, flacdec->close_segment);
- flacdec->close_segment = NULL;
- }
- if (flacdec->start_segment) {
- GST_DEBUG_OBJECT (flacdec, "pushing start segment");
- gst_pad_push_event (flacdec->srcpad, flacdec->start_segment);
- flacdec->start_segment = NULL;
- }
- if (flacdec->tags) {
- gst_element_found_tags_for_pad (GST_ELEMENT (flacdec), flacdec->srcpad,
- flacdec->tags);
- flacdec->tags = NULL;
+ gst_audio_decoder_set_output_format (GST_AUDIO_DECODER (flacdec),
+ &flacdec->info);
}
- if (flacdec->pending) {
- GST_DEBUG_OBJECT (flacdec,
- "pushing pending samples at offset %" G_GINT64_FORMAT " (%"
- GST_TIME_FORMAT " + %" GST_TIME_FORMAT ")",
- GST_BUFFER_OFFSET (flacdec->pending),
- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (flacdec->pending)),
- GST_TIME_ARGS (GST_BUFFER_DURATION (flacdec->pending)));
- /* Pending buffer was always allocated from the seeking thread,
- * which means it wasn't gst_buffer_alloc'd. Do so now to let
- * downstream negotiation work on older basetransform */
- ret = gst_pad_alloc_buffer_and_set_caps (flacdec->srcpad,
- GST_BUFFER_OFFSET (flacdec->pending),
- GST_BUFFER_SIZE (flacdec->pending),
- GST_BUFFER_CAPS (flacdec->pending), &outbuf);
- if (ret == GST_FLOW_OK) {
- gst_pad_push (flacdec->srcpad, flacdec->pending);
- gst_buffer_unref (outbuf);
- }
-
- outbuf = flacdec->pending = NULL;
- flacdec->segment.last_stop += flacdec->pending_samples;
- flacdec->pending_samples = 0;
- }
-
- if (flacdec->seeking) {
- GST_DEBUG_OBJECT (flacdec, "a pad_alloc would block here, do normal alloc");
- outbuf = gst_buffer_new_and_alloc (samples * channels * (width / 8));
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (flacdec->srcpad));
- GST_BUFFER_OFFSET (outbuf) = flacdec->segment.last_stop;
- } else {
- GST_LOG_OBJECT (flacdec, "alloc_buffer_and_set_caps");
- ret = gst_pad_alloc_buffer_and_set_caps (flacdec->srcpad,
- flacdec->segment.last_stop, samples * channels * (width / 8),
- GST_PAD_CAPS (flacdec->srcpad), &outbuf);
-
- if (ret != GST_FLOW_OK) {
- GST_DEBUG_OBJECT (flacdec, "gst_pad_alloc_buffer() returned %s",
- gst_flow_get_name (ret));
- goto done;
- }
- }
-
- if (flacdec->cur_granulepos != GST_BUFFER_OFFSET_NONE) {
- /* this should be fine since it should be one flac frame per ogg packet */
- /* note the + 1, as the granpos is the presentation time of the last sample,
- whereas the last stop represents the end time of that sample */
- flacdec->segment.last_stop = flacdec->cur_granulepos - samples + 1;
- GST_LOG_OBJECT (flacdec, "granulepos = %" G_GINT64_FORMAT ", samples = %u",
- flacdec->cur_granulepos, samples);
- }
-
- GST_BUFFER_TIMESTAMP (outbuf) =
- gst_util_uint64_scale_int (flacdec->segment.last_stop, GST_SECOND,
- frame->header.sample_rate);
-
- /* get next timestamp to calculate the duration */
- next = gst_util_uint64_scale_int (flacdec->segment.last_stop + samples,
- GST_SECOND, frame->header.sample_rate);
-
- GST_BUFFER_DURATION (outbuf) = next - GST_BUFFER_TIMESTAMP (outbuf);
+ GST_LOG_OBJECT (flacdec, "alloc_buffer_and_set_caps");
+ outbuf = gst_buffer_new_allocate (NULL, samples * channels * (width / 8), 0);
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
if (width == 8) {
- gint8 *outbuffer = (gint8 *) GST_BUFFER_DATA (outbuf);
+ gint8 *outbuffer = (gint8 *) map.data;
+ gint *reorder_map = flacdec->channel_reorder_map;
- for (i = 0; i < samples; i++) {
- for (j = 0; j < channels; j++) {
- *outbuffer++ = (gint8) buffer[j][i];
+ if (width != depth) {
+ for (i = 0; i < samples; i++) {
+ for (j = 0; j < channels; j++) {
+ *outbuffer++ = (gint8) (buffer[reorder_map[j]][i] << (width - depth));
+ }
+ }
+ } else {
+ for (i = 0; i < samples; i++) {
+ for (j = 0; j < channels; j++) {
+ *outbuffer++ = (gint8) buffer[reorder_map[j]][i];
+ }
}
}
} else if (width == 16) {
- gint16 *outbuffer = (gint16 *) GST_BUFFER_DATA (outbuf);
-
- for (i = 0; i < samples; i++) {
- for (j = 0; j < channels; j++) {
- *outbuffer++ = (gint16) buffer[j][i];
+ gint16 *outbuffer = (gint16 *) map.data;
+ gint *reorder_map = flacdec->channel_reorder_map;
+
+ if (width != depth) {
+ for (i = 0; i < samples; i++) {
+ for (j = 0; j < channels; j++) {
+ *outbuffer++ =
+ (gint16) (buffer[reorder_map[j]][i] << (width - depth));
+ }
+ }
+ } else {
+ for (i = 0; i < samples; i++) {
+ for (j = 0; j < channels; j++) {
+ *outbuffer++ = (gint16) buffer[reorder_map[j]][i];
+ }
}
}
} else if (width == 32) {
- gint32 *outbuffer = (gint32 *) GST_BUFFER_DATA (outbuf);
-
- for (i = 0; i < samples; i++) {
- for (j = 0; j < channels; j++) {
- *outbuffer++ = (gint32) buffer[j][i];
+ gint32 *outbuffer = (gint32 *) map.data;
+ gint *reorder_map = flacdec->channel_reorder_map;
+
+ if (width != depth) {
+ for (i = 0; i < samples; i++) {
+ for (j = 0; j < channels; j++) {
+ *outbuffer++ =
+ (gint32) (buffer[reorder_map[j]][i] << (width - depth));
+ }
+ }
+ } else {
+ for (i = 0; i < samples; i++) {
+ for (j = 0; j < channels; j++) {
+ *outbuffer++ = (gint32) buffer[reorder_map[j]][i];
+ }
}
}
} else {
g_assert_not_reached ();
}
+ gst_buffer_unmap (outbuf, &map);
- if (!flacdec->seeking) {
- GST_DEBUG_OBJECT (flacdec, "pushing %d samples at offset %" G_GINT64_FORMAT
- " (%" GST_TIME_FORMAT " + %" GST_TIME_FORMAT ")",
- samples, GST_BUFFER_OFFSET (outbuf),
- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
- GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
-
- if (flacdec->discont) {
- GST_DEBUG_OBJECT (flacdec, "marking discont");
- outbuf = gst_buffer_make_metadata_writable (outbuf);
- GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- flacdec->discont = FALSE;
- }
- ret = gst_pad_push (flacdec->srcpad, outbuf);
- GST_DEBUG_OBJECT (flacdec, "returned %s", gst_flow_get_name (ret));
- flacdec->segment.last_stop += samples;
- } else {
- GST_DEBUG_OBJECT (flacdec,
- "not pushing %d samples at offset %" G_GINT64_FORMAT
- " (in seek)", samples, GST_BUFFER_OFFSET (outbuf));
- gst_buffer_replace (&flacdec->pending, outbuf);
- gst_buffer_unref (outbuf);
- flacdec->pending_samples = samples;
- ret = GST_FLOW_OK;
- }
+ GST_DEBUG_OBJECT (flacdec, "pushing %d samples", samples);
- if (ret != GST_FLOW_OK) {
- GST_DEBUG_OBJECT (flacdec, "gst_pad_push() returned %s",
- gst_flow_get_name (ret));
+ ret = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (flacdec), outbuf, 1);
+
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ GST_DEBUG_OBJECT (flacdec, "finish_frame flow %s", gst_flow_get_name (ret));
}
done:
-
- /* we act on the flow return value later in the loop function, as we don't
- * want to mess up the internal decoder state by returning ABORT when the
- * error is in fact non-fatal (like a pad in flushing mode) and we want
+ /* we act on the flow return value later in the handle_frame function, as we
+ * don't want to mess up the internal decoder state by returning ABORT when
+ * the error is in fact non-fatal (like a pad in flushing mode) and we want
* to continue later. So just pretend everything's dandy and act later. */
flacdec->last_flow = ret;
}
static void
-gst_flac_dec_loop (GstPad * sinkpad)
+gst_flac_dec_flush (GstAudioDecoder * audio_dec, gboolean hard)
{
- GstFlacDec *flacdec;
- FLAC__StreamDecoderState s;
- FLAC__StreamDecoderInitStatus is;
-
- flacdec = GST_FLAC_DEC (GST_OBJECT_PARENT (sinkpad));
-
- GST_LOG_OBJECT (flacdec, "entering loop");
-
- if (flacdec->eos) {
- GST_DEBUG_OBJECT (flacdec, "Seeked after end of file");
+ GstFlacDec *dec = GST_FLAC_DEC (audio_dec);
- if (flacdec->close_segment) {
- GST_DEBUG_OBJECT (flacdec, "pushing close segment");
- gst_pad_push_event (flacdec->srcpad, flacdec->close_segment);
- flacdec->close_segment = NULL;
- }
- if (flacdec->start_segment) {
- GST_DEBUG_OBJECT (flacdec, "pushing start segment");
- gst_pad_push_event (flacdec->srcpad, flacdec->start_segment);
- flacdec->start_segment = NULL;
- }
-
- if (flacdec->tags) {
- gst_element_found_tags_for_pad (GST_ELEMENT (flacdec), flacdec->srcpad,
- flacdec->tags);
- flacdec->tags = NULL;
- }
+ if (!hard) {
+ guint available = gst_adapter_available (dec->adapter);
- if ((flacdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0) {
- goto eos_and_pause;
- } else {
- goto segment_done_and_pause;
+ if (available > 0) {
+ GST_INFO_OBJECT (dec, "draining, %u bytes left in adapter", available);
+ FLAC__stream_decoder_process_until_end_of_stream (dec->decoder);
}
}
- if (flacdec->init) {
- GST_DEBUG_OBJECT (flacdec, "initializing new decoder");
- is = FLAC__stream_decoder_init_stream (flacdec->decoder,
- gst_flac_dec_read_seekable, gst_flac_dec_seek, gst_flac_dec_tell,
- gst_flac_dec_length, gst_flac_dec_eof, gst_flac_dec_write_stream,
- gst_flac_dec_metadata_cb, gst_flac_dec_error_cb, flacdec);
- if (is != FLAC__STREAM_DECODER_INIT_STATUS_OK)
- goto analyze_state;
-
- /* FLAC__seekable_decoder_process_metadata (flacdec->decoder); */
- flacdec->init = FALSE;
- }
-
- flacdec->cur_granulepos = GST_BUFFER_OFFSET_NONE;
-
- flacdec->last_flow = GST_FLOW_OK;
-
- GST_LOG_OBJECT (flacdec, "processing single");
- FLAC__stream_decoder_process_single (flacdec->decoder);
-
-analyze_state:
-
- GST_LOG_OBJECT (flacdec, "done processing, checking encoder state");
- s = FLAC__stream_decoder_get_state (flacdec->decoder);
- switch (s) {
- case FLAC__STREAM_DECODER_SEARCH_FOR_METADATA:
- case FLAC__STREAM_DECODER_READ_METADATA:
- case FLAC__STREAM_DECODER_SEARCH_FOR_FRAME_SYNC:
- case FLAC__STREAM_DECODER_READ_FRAME:
- {
- GST_DEBUG_OBJECT (flacdec, "everything ok");
-
- if (flacdec->last_flow < GST_FLOW_UNEXPECTED ||
- flacdec->last_flow == GST_FLOW_NOT_LINKED) {
- GST_ELEMENT_ERROR (flacdec, STREAM, FAILED,
- (_("Internal data stream error.")),
- ("stream stopped, reason %s",
- gst_flow_get_name (flacdec->last_flow)));
- goto eos_and_pause;
- } else if (flacdec->last_flow == GST_FLOW_UNEXPECTED) {
- goto eos_and_pause;
- } else if (flacdec->last_flow != GST_FLOW_OK) {
- goto pause;
- }
+ FLAC__stream_decoder_flush (dec->decoder);
+ gst_adapter_clear (dec->adapter);
+}
- /* check if we're at the end of a configured segment */
- if (flacdec->segment.stop != -1 &&
- flacdec->segment.last_stop > 0 &&
- flacdec->segment.last_stop >= flacdec->segment.stop) {
- GST_DEBUG_OBJECT (flacdec, "reached end of the configured segment");
+static GstFlowReturn
+gst_flac_dec_handle_frame (GstAudioDecoder * audio_dec, GstBuffer * buf)
+{
+ GstFlacDec *dec;
- if ((flacdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0) {
- goto eos_and_pause;
- } else {
- goto segment_done_and_pause;
- }
+ dec = GST_FLAC_DEC (audio_dec);
- g_assert_not_reached ();
- }
+ /* drain remaining data? */
+ if (G_UNLIKELY (buf == NULL)) {
+ gst_flac_dec_flush (audio_dec, FALSE);
+ return GST_FLOW_OK;
+ }
- return;
- }
+ GST_LOG_OBJECT (dec, "frame: ts %" GST_TIME_FORMAT ", flags 0x%04x, "
+ "%" G_GSIZE_FORMAT " bytes", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
+ GST_BUFFER_FLAGS (buf), gst_buffer_get_size (buf));
- case FLAC__STREAM_DECODER_END_OF_STREAM:{
- GST_DEBUG_OBJECT (flacdec, "EOS");
- FLAC__stream_decoder_reset (flacdec->decoder);
+ /* drop any in-stream headers, we've processed those in set_format already */
+ if (G_UNLIKELY (!dec->got_headers)) {
+ gboolean got_audio_frame;
+ gint64 unused;
+ GstMapInfo map;
- if ((flacdec->segment.flags & GST_SEEK_FLAG_SEGMENT) != 0) {
- if (flacdec->segment.duration > 0) {
- flacdec->segment.stop = flacdec->segment.duration;
- } else {
- flacdec->segment.stop = flacdec->segment.last_stop;
- }
- goto segment_done_and_pause;
- }
+ /* check if this is a flac audio frame (rather than a header or junk) */
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ got_audio_frame =
+ gst_flac_dec_scan_got_frame (dec, map.data, map.size, &unused);
+ gst_buffer_unmap (buf, &map);
- goto eos_and_pause;
+ if (!got_audio_frame) {
+ GST_INFO_OBJECT (dec, "dropping in-stream header, %" G_GSIZE_FORMAT " "
+ "bytes", map.size);
+ gst_audio_decoder_finish_frame (audio_dec, NULL, 1);
+ return GST_FLOW_OK;
}
- /* gst_flac_dec_read_seekable() returned ABORTED */
- case FLAC__STREAM_DECODER_ABORTED:
- {
- GST_INFO_OBJECT (flacdec, "read aborted: last pull_range flow = %s",
- gst_flow_get_name (flacdec->pull_flow));
- if (flacdec->pull_flow == GST_FLOW_WRONG_STATE) {
- /* it seems we need to flush the decoder here to reset the decoder
- * state after the abort for FLAC__stream_decoder_seek_absolute()
- * to work properly */
- GST_DEBUG_OBJECT (flacdec, "flushing decoder to reset decoder state");
- FLAC__stream_decoder_flush (flacdec->decoder);
- goto pause;
- }
- /* fall through */
- }
- case FLAC__STREAM_DECODER_OGG_ERROR:
- case FLAC__STREAM_DECODER_SEEK_ERROR:
- case FLAC__STREAM_DECODER_MEMORY_ALLOCATION_ERROR:
- case FLAC__STREAM_DECODER_UNINITIALIZED:
- default:{
- /* fixme: this error sucks -- should try to figure out when/if an more
- specific error was already sent via the callback */
- GST_ELEMENT_ERROR (flacdec, STREAM, DECODE, (NULL),
- ("%s", FLAC__StreamDecoderStateString[s]));
- goto eos_and_pause;
- }
+ GST_INFO_OBJECT (dec, "first audio frame, got all in-stream headers now");
+ dec->got_headers = TRUE;
}
- return;
+ gst_adapter_push (dec->adapter, gst_buffer_ref (buf));
+ buf = NULL;
-segment_done_and_pause:
- {
- gint64 stop_time;
+ dec->last_flow = GST_FLOW_OK;
- stop_time = gst_util_uint64_scale_int (flacdec->segment.stop,
- GST_SECOND, flacdec->sample_rate);
+ /* framed - there should always be enough data to decode something */
+ GST_LOG_OBJECT (dec, "%" G_GSIZE_FORMAT " bytes available",
+ gst_adapter_available (dec->adapter));
- GST_DEBUG_OBJECT (flacdec, "posting SEGMENT_DONE message, stop time %"
- GST_TIME_FORMAT, GST_TIME_ARGS (stop_time));
-
- gst_element_post_message (GST_ELEMENT (flacdec),
- gst_message_new_segment_done (GST_OBJECT (flacdec),
- GST_FORMAT_TIME, stop_time));
-
- goto pause;
- }
-eos_and_pause:
- {
- GST_DEBUG_OBJECT (flacdec, "sending EOS event");
- flacdec->running = FALSE;
- gst_pad_push_event (flacdec->srcpad, gst_event_new_eos ());
- /* fall through to pause */
- }
-pause:
- {
- GST_DEBUG_OBJECT (flacdec, "pausing");
- gst_pad_pause_task (sinkpad);
- return;
- }
-}
-
-static gboolean
-gst_flac_dec_sink_event (GstPad * pad, GstEvent * event)
-{
- GstFlacDec *dec;
- gboolean res;
-
- dec = GST_FLAC_DEC (gst_pad_get_parent (pad));
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_FLUSH_STOP:{
- if (dec->init == FALSE) {
- FLAC__stream_decoder_flush (dec->decoder);
- gst_adapter_clear (dec->adapter);
- }
- res = gst_pad_push_event (dec->srcpad, event);
- break;
- }
- case GST_EVENT_NEWSEGMENT:{
- GstFormat fmt;
- gboolean update;
- gdouble rate, applied_rate;
- gint64 cur, stop, time;
-
- gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
- &fmt, &cur, &stop, &time);
-
- if (fmt == GST_FORMAT_TIME) {
- GstFormat dformat = GST_FORMAT_DEFAULT;
-
- GST_DEBUG_OBJECT (dec, "newsegment event in TIME format => framed");
- dec->framed = TRUE;
- res = gst_pad_push_event (dec->srcpad, event);
-
- /* this won't work for the first newsegment event though ... */
- if (gst_flac_dec_convert_src (dec->srcpad, GST_FORMAT_TIME, cur,
- &dformat, &cur) && cur != -1 &&
- gst_flac_dec_convert_src (dec->srcpad, GST_FORMAT_TIME, stop,
- &dformat, &stop) && stop != -1) {
- gst_segment_set_newsegment_full (&dec->segment, update, rate,
- applied_rate, dformat, cur, stop, time);
- GST_DEBUG_OBJECT (dec, "segment %" GST_SEGMENT_FORMAT, &dec->segment);
- } else {
- GST_WARNING_OBJECT (dec, "couldn't convert time => samples");
- }
- } else if (fmt == GST_FORMAT_BYTES || TRUE) {
- GST_DEBUG_OBJECT (dec, "newsegment event in %s format => not framed",
- gst_format_get_name (fmt));
- dec->framed = FALSE;
-
- /* prepare generic newsegment event, for some reason our metadata
- * callback where we usually set this up is not being called in
- * push mode */
- if (dec->start_segment)
- gst_event_unref (dec->start_segment);
- dec->start_segment = gst_event_new_new_segment (FALSE, 1.0,
- GST_FORMAT_TIME, 0, -1, 0);
-
- gst_event_unref (event);
- res = TRUE;
- }
- break;
- }
- case GST_EVENT_EOS:{
- GST_LOG_OBJECT (dec, "EOS, with %u bytes available in adapter",
- gst_adapter_available (dec->adapter));
- if (dec->init == FALSE) {
- if (gst_adapter_available (dec->adapter) > 0) {
- FLAC__stream_decoder_process_until_end_of_stream (dec->decoder);
- }
- FLAC__stream_decoder_flush (dec->decoder);
- }
- gst_adapter_clear (dec->adapter);
- res = gst_pad_push_event (dec->srcpad, event);
- break;
- }
- default:
- res = gst_pad_event_default (pad, event);
- break;
- }
-
- gst_object_unref (dec);
-
- return res;
-}
-
-static gboolean
-gst_flac_dec_chain_parse_headers (GstFlacDec * dec)
-{
- guint8 marker[4];
- guint avail, off;
-
- avail = gst_adapter_available (dec->adapter);
- if (avail < 4)
- return FALSE;
-
- gst_adapter_copy (dec->adapter, marker, 0, 4);
- if (strncmp ((const gchar *) marker, "fLaC", 4) != 0) {
- GST_ERROR_OBJECT (dec, "Unexpected header, expected fLaC header");
- return TRUE; /* abort header parsing */
- }
-
- GST_DEBUG_OBJECT (dec, "fLaC header : len 4 @ %7u", 0);
-
- off = 4;
- while (avail > (off + 1 + 3)) {
- gboolean is_last;
- guint8 mb_hdr[4];
- guint len, block_type;
-
- gst_adapter_copy (dec->adapter, mb_hdr, off, 4);
-
- is_last = ((mb_hdr[0] & 0x80) == 0x80);
- block_type = mb_hdr[0] & 0x7f;
- len = GST_READ_UINT24_BE (mb_hdr + 1);
- GST_DEBUG_OBJECT (dec, "Metadata block type %u: len %7u + 4 @ %7u%s",
- block_type, len, off, (is_last) ? " (last)" : "");
- off += 4 + len;
-
- if (is_last)
- break;
-
- if (off >= avail) {
- GST_LOG_OBJECT (dec, "Need more data: next offset %u > avail %u", off,
- avail);
- return FALSE;
- }
- }
-
- /* want metadata blocks plus at least one frame */
- return (off + FLAC__MAX_BLOCK_SIZE >= avail);
-}
-
-static GstFlowReturn
-gst_flac_dec_chain (GstPad * pad, GstBuffer * buf)
-{
- FLAC__StreamDecoderInitStatus s;
- GstFlacDec *dec;
- gboolean got_audio_frame;
-
- dec = GST_FLAC_DEC (GST_PAD_PARENT (pad));
-
- GST_LOG_OBJECT (dec,
- "buffer with ts=%" GST_TIME_FORMAT ", offset=%" G_GINT64_FORMAT
- ", end_offset=%" G_GINT64_FORMAT ", size=%u",
- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_BUFFER_OFFSET (buf),
- GST_BUFFER_OFFSET_END (buf), GST_BUFFER_SIZE (buf));
-
- if (dec->init) {
- GST_DEBUG_OBJECT (dec, "initializing decoder");
- s = FLAC__stream_decoder_init_stream (dec->decoder,
- gst_flac_dec_read_stream, NULL, NULL, NULL, NULL,
- gst_flac_dec_write_stream, gst_flac_dec_metadata_cb,
- gst_flac_dec_error_cb, dec);
- if (s != FLAC__STREAM_DECODER_INIT_STATUS_OK) {
- GST_ELEMENT_ERROR (GST_ELEMENT (dec), LIBRARY, INIT, (NULL), (NULL));
- return GST_FLOW_ERROR;
- }
- GST_DEBUG_OBJECT (dec, "initialized (framed=%d)", dec->framed);
- dec->init = FALSE;
- } else if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
- /* Clear the adapter and the decoder */
- gst_adapter_clear (dec->adapter);
- FLAC__stream_decoder_flush (dec->decoder);
- }
-
- if (dec->framed) {
- gint64 unused;
-
- /* check if this is a flac audio frame (rather than a header or junk) */
- got_audio_frame = gst_flac_dec_scan_got_frame (dec, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf), &unused);
-
- /* oggdemux will set granulepos in OFFSET_END instead of timestamp */
- if (G_LIKELY (got_audio_frame)) {
- /* old oggdemux for now */
- if (!GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
- dec->cur_granulepos = GST_BUFFER_OFFSET_END (buf);
- } else {
- GstFormat dformat = GST_FORMAT_DEFAULT;
-
- /* upstream (e.g. demuxer) presents us time,
- * convert to default samples */
- gst_flac_dec_convert_src (dec->srcpad, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (buf), &dformat, &dec->segment.last_stop);
- dec->cur_granulepos = GST_BUFFER_OFFSET_NONE;
- }
- }
- } else {
- dec->cur_granulepos = GST_BUFFER_OFFSET_NONE;
- got_audio_frame = TRUE;
- }
-
- gst_adapter_push (dec->adapter, buf);
- buf = NULL;
-
- dec->last_flow = GST_FLOW_OK;
-
- if (!dec->framed) {
- if (G_UNLIKELY (!dec->got_headers)) {
- if (!gst_flac_dec_chain_parse_headers (dec)) {
- GST_LOG_OBJECT (dec, "don't have metadata blocks yet, need more data");
- goto out;
- }
- GST_INFO_OBJECT (dec, "have all metadata blocks now");
- dec->got_headers = TRUE;
- }
-
- /* wait until we have at least 64kB because libflac's StreamDecoder
- * interface is a bit dumb it seems (if we don't have as much data as
- * it wants it will call our read callback repeatedly and the only
- * way to stop that is to error out or EOS, which will affect the
- * decoder state). And the decoder seems to always ask for MAX_BLOCK_SIZE
- * bytes rather than the max. block size from the header). Requiring
- * MAX_BLOCK_SIZE bytes here should make sure it always gets enough data
- * to decode at least one block */
- while (gst_adapter_available (dec->adapter) >= FLAC__MAX_BLOCK_SIZE &&
- dec->last_flow == GST_FLOW_OK) {
- GST_LOG_OBJECT (dec, "%u bytes available",
- gst_adapter_available (dec->adapter));
- if (!FLAC__stream_decoder_process_single (dec->decoder)) {
- GST_DEBUG_OBJECT (dec, "process_single failed");
- break;
- }
-
- if (FLAC__stream_decoder_get_state (dec->decoder) ==
- FLAC__STREAM_DECODER_ABORTED) {
- GST_WARNING_OBJECT (dec, "Read callback caused internal abort");
- dec->last_flow = GST_FLOW_ERROR;
- break;
- }
- }
- } else if (dec->framed && got_audio_frame) {
- /* framed - there should always be enough data to decode something */
- GST_LOG_OBJECT (dec, "%u bytes available",
- gst_adapter_available (dec->adapter));
- if (G_UNLIKELY (!dec->got_headers)) {
- /* The first time we get audio data, we know we got all the headers.
- * We then loop until all the metadata is processed, then do an extra
- * "process_single" step for the audio frame. */
- GST_DEBUG_OBJECT (dec,
- "First audio frame, ensuring all metadata is processed");
- if (!FLAC__stream_decoder_process_until_end_of_metadata (dec->decoder)) {
- GST_DEBUG_OBJECT (dec, "process_until_end_of_metadata failed");
- }
- GST_DEBUG_OBJECT (dec,
- "All metadata is now processed, reading to process audio data");
- dec->got_headers = TRUE;
- }
- if (!FLAC__stream_decoder_process_single (dec->decoder)) {
- GST_DEBUG_OBJECT (dec, "process_single failed");
- }
- } else {
- GST_DEBUG_OBJECT (dec, "don't have all headers yet");
- }
-
-out:
+ if (!FLAC__stream_decoder_process_single (dec->decoder)) {
+ GST_INFO_OBJECT (dec, "process_single failed");
+ }
return dec->last_flow;
}
-
-static gboolean
-gst_flac_dec_convert_sink (GstFlacDec * dec, GstFormat src_format,
- gint64 src_value, GstFormat * dest_format, gint64 * dest_value)
-{
- gboolean res = TRUE;
-
- if (dec->width == 0 || dec->channels == 0 || dec->sample_rate == 0) {
- /* no frame decoded yet */
- GST_DEBUG_OBJECT (dec, "cannot convert: not set up yet");
- return FALSE;
- }
-
- switch (src_format) {
- case GST_FORMAT_BYTES:{
- res = FALSE;
- break;
- }
- case GST_FORMAT_DEFAULT:
- switch (*dest_format) {
- case GST_FORMAT_BYTES:
- res = FALSE;
- break;
- case GST_FORMAT_TIME:
- /* granulepos = sample */
- *dest_value = gst_util_uint64_scale_int (src_value, GST_SECOND,
- dec->sample_rate);
- break;
- default:
- res = FALSE;
- break;
- }
- break;
- case GST_FORMAT_TIME:
- switch (*dest_format) {
- case GST_FORMAT_BYTES:
- res = FALSE;
- break;
- case GST_FORMAT_DEFAULT:
- *dest_value = gst_util_uint64_scale_int (src_value,
- dec->sample_rate, GST_SECOND);
- break;
- default:
- res = FALSE;
- break;
- }
- break;
- default:
- res = FALSE;
- break;
- }
- return res;
-}
-
-static const GstQueryType *
-gst_flac_dec_get_sink_query_types (GstPad * pad)
-{
- static const GstQueryType types[] = {
- GST_QUERY_CONVERT,
- 0,
- };
-
- return types;
-}
-
-static gboolean
-gst_flac_dec_sink_query (GstPad * pad, GstQuery * query)
-{
- GstFlacDec *dec;
- gboolean res = FALSE;
-
- dec = GST_FLAC_DEC (gst_pad_get_parent (pad));
-
- GST_LOG_OBJECT (dec, "%s query", GST_QUERY_TYPE_NAME (query));
-
- switch (GST_QUERY_TYPE (query)) {
- case GST_QUERY_CONVERT:{
- GstFormat src_fmt, dest_fmt;
-
- gint64 src_val, dest_val;
-
- gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, NULL);
-
- res = gst_flac_dec_convert_sink (dec, src_fmt, src_val, &dest_fmt,
- &dest_val);
-
- if (res) {
- gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
- }
- GST_LOG_OBJECT (dec, "conversion %s", (res) ? "ok" : "FAILED");
- break;
- }
-
- default:{
- res = gst_pad_query_default (pad, query);
- break;
- }
- }
-
- gst_object_unref (dec);
- return res;
-}
-
-static gboolean
-gst_flac_dec_convert_src (GstPad * pad, GstFormat src_format, gint64 src_value,
- GstFormat * dest_format, gint64 * dest_value)
-{
- GstFlacDec *flacdec = GST_FLAC_DEC (GST_PAD_PARENT (pad));
- gboolean res = TRUE;
- guint bytes_per_sample;
- guint scale = 1;
-
- if (flacdec->width == 0 || flacdec->channels == 0 ||
- flacdec->sample_rate == 0) {
- /* no frame decoded yet */
- GST_DEBUG_OBJECT (flacdec, "cannot convert: not set up yet");
- return FALSE;
- }
-
- bytes_per_sample = flacdec->channels * (flacdec->width / 8);
-
- switch (src_format) {
- case GST_FORMAT_BYTES:{
- switch (*dest_format) {
- case GST_FORMAT_DEFAULT:
- *dest_value =
- gst_util_uint64_scale_int (src_value, 1, bytes_per_sample);
- break;
- case GST_FORMAT_TIME:
- {
- gint byterate = bytes_per_sample * flacdec->sample_rate;
-
- *dest_value = gst_util_uint64_scale_int (src_value, GST_SECOND,
- byterate);
- break;
- }
- default:
- res = FALSE;
- }
- break;
- }
- case GST_FORMAT_DEFAULT:
- switch (*dest_format) {
- case GST_FORMAT_BYTES:
- *dest_value = src_value * bytes_per_sample;
- break;
- case GST_FORMAT_TIME:
- *dest_value = gst_util_uint64_scale_int (src_value, GST_SECOND,
- flacdec->sample_rate);
- break;
- default:
- res = FALSE;
- }
- break;
- case GST_FORMAT_TIME:
- switch (*dest_format) {
- case GST_FORMAT_BYTES:
- scale = bytes_per_sample;
- case GST_FORMAT_DEFAULT:
- *dest_value = gst_util_uint64_scale_int_round (src_value,
- scale * flacdec->sample_rate, GST_SECOND);
- break;
- default:
- res = FALSE;
- }
- break;
- default:
- res = FALSE;
- }
- return res;
-}
-
-static const GstQueryType *
-gst_flac_dec_get_src_query_types (GstPad * pad)
-{
- static const GstQueryType types[] = {
- GST_QUERY_POSITION,
- GST_QUERY_DURATION,
- GST_QUERY_CONVERT,
- GST_QUERY_SEEKING,
- 0,
- };
-
- return types;
-}
-
-static gboolean
-gst_flac_dec_src_query (GstPad * pad, GstQuery * query)
-{
- GstFlacDec *flacdec;
- gboolean res = TRUE;
- GstPad *peer;
-
- flacdec = GST_FLAC_DEC (gst_pad_get_parent (pad));
- peer = gst_pad_get_peer (flacdec->sinkpad);
-
- switch (GST_QUERY_TYPE (query)) {
- case GST_QUERY_POSITION:{
- GstFormat fmt;
- gint64 pos;
-
- gst_query_parse_position (query, &fmt, NULL);
-
- /* there might be a demuxer in front of us who can handle this */
- if (fmt == GST_FORMAT_TIME && (res = gst_pad_query (peer, query)))
- break;
-
- if (fmt != GST_FORMAT_DEFAULT) {
- if (!gst_flac_dec_convert_src (flacdec->srcpad, GST_FORMAT_DEFAULT,
- flacdec->segment.last_stop, &fmt, &pos)) {
- GST_DEBUG_OBJECT (flacdec, "failed to convert position into %s "
- "format", gst_format_get_name (fmt));
- res = FALSE;
- goto done;
- }
- } else {
- pos = flacdec->segment.last_stop;
- }
-
- gst_query_set_position (query, fmt, pos);
-
- GST_DEBUG_OBJECT (flacdec, "returning position %" G_GUINT64_FORMAT
- " (format: %s)", pos, gst_format_get_name (fmt));
-
- res = TRUE;
- break;
- }
-
- case GST_QUERY_DURATION:{
- GstFormat fmt;
- gint64 len;
-
- gst_query_parse_duration (query, &fmt, NULL);
-
- /* try any demuxers or parsers before us first */
- if ((fmt == GST_FORMAT_TIME || fmt == GST_FORMAT_DEFAULT) &&
- peer != NULL && gst_pad_query (peer, query)) {
- gst_query_parse_duration (query, NULL, &len);
- GST_DEBUG_OBJECT (flacdec, "peer returned duration %" GST_TIME_FORMAT,
- GST_TIME_ARGS (len));
- res = TRUE;
- goto done;
- }
-
- if (flacdec->segment.duration == 0 || flacdec->segment.duration == -1) {
- GST_DEBUG_OBJECT (flacdec, "duration not known yet");
- res = FALSE;
- goto done;
- }
-
- /* convert total number of samples to request format */
- if (fmt != GST_FORMAT_DEFAULT) {
- if (!gst_flac_dec_convert_src (flacdec->srcpad, GST_FORMAT_DEFAULT,
- flacdec->segment.duration, &fmt, &len)) {
- GST_DEBUG_OBJECT (flacdec, "failed to convert duration into %s "
- "format", gst_format_get_name (fmt));
- res = FALSE;
- goto done;
- }
- } else {
- len = flacdec->segment.duration;
- }
-
- gst_query_set_duration (query, fmt, len);
-
- GST_DEBUG_OBJECT (flacdec, "returning duration %" G_GUINT64_FORMAT
- " (format: %s)", len, gst_format_get_name (fmt));
-
- res = TRUE;
- break;
- }
-
- case GST_QUERY_CONVERT:{
- GstFormat src_fmt, dest_fmt;
- gint64 src_val, dest_val;
-
- gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, NULL);
-
- res = gst_flac_dec_convert_src (pad, src_fmt, src_val, &dest_fmt,
- &dest_val);
-
- if (res) {
- gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
- }
-
- break;
- }
- case GST_QUERY_SEEKING:{
- GstFormat fmt;
- gboolean seekable = FALSE;
-
- res = TRUE;
- /* If upstream can handle the query we're done */
- seekable = gst_pad_peer_query (flacdec->sinkpad, query);
- if (seekable)
- gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
- if (seekable)
- goto done;
-
- gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
- if ((fmt != GST_FORMAT_TIME && fmt != GST_FORMAT_DEFAULT) ||
- flacdec->streaming) {
- gst_query_set_seeking (query, fmt, FALSE, -1, -1);
- } else {
- gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, -1);
- }
- break;
- }
-
- default:{
- res = gst_pad_query_default (pad, query);
- break;
- }
- }
-
-done:
-
- if (peer)
- gst_object_unref (peer);
-
- gst_object_unref (flacdec);
-
- return res;
-}
-
-static gboolean
-gst_flac_dec_handle_seek_event (GstFlacDec * flacdec, GstEvent * event)
-{
- FLAC__bool seek_ok;
- GstSeekFlags seek_flags;
- GstSeekType start_type;
- GstSeekType stop_type;
- GstSegment segment;
- GstFormat seek_format;
- gboolean only_update = FALSE;
- gboolean flush;
- gdouble rate;
- gint64 start, last_stop;
- gint64 stop;
-
- if (flacdec->streaming) {
- GST_DEBUG_OBJECT (flacdec, "seeking in streaming mode not implemented yet");
- return FALSE;
- }
-
- gst_event_parse_seek (event, &rate, &seek_format, &seek_flags, &start_type,
- &start, &stop_type, &stop);
-
- if (seek_format != GST_FORMAT_DEFAULT && seek_format != GST_FORMAT_TIME) {
- GST_DEBUG_OBJECT (flacdec,
- "seeking is only supported in TIME or DEFAULT format");
- return FALSE;
- }
-
- if (rate < 0.0) {
- GST_DEBUG_OBJECT (flacdec,
- "only forward playback supported, rate %f not allowed", rate);
- return FALSE;
- }
-
- if (seek_format != GST_FORMAT_DEFAULT) {
- GstFormat target_format = GST_FORMAT_DEFAULT;
-
- if (start_type != GST_SEEK_TYPE_NONE &&
- !gst_flac_dec_convert_src (flacdec->srcpad, seek_format, start,
- &target_format, &start)) {
- GST_DEBUG_OBJECT (flacdec, "failed to convert start to DEFAULT format");
- return FALSE;
- }
-
- if (stop_type != GST_SEEK_TYPE_NONE &&
- !gst_flac_dec_convert_src (flacdec->srcpad, seek_format, stop,
- &target_format, &stop)) {
- GST_DEBUG_OBJECT (flacdec, "failed to convert stop to DEFAULT format");
- return FALSE;
- }
- }
-
- /* Check if we seeked after the end of file */
- if (start_type != GST_SEEK_TYPE_NONE && flacdec->segment.duration > 0 &&
- start >= flacdec->segment.duration) {
- flacdec->eos = TRUE;
- } else {
- flacdec->eos = FALSE;
- }
-
- flush = ((seek_flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH);
-
- if (flush) {
- /* flushing seek, clear the pipeline of stuff, we need a newsegment after
- * this. */
- GST_DEBUG_OBJECT (flacdec, "flushing");
- gst_pad_push_event (flacdec->sinkpad, gst_event_new_flush_start ());
- gst_pad_push_event (flacdec->srcpad, gst_event_new_flush_start ());
- } else {
- /* non flushing seek, pause the task */
- GST_DEBUG_OBJECT (flacdec, "stopping task");
- gst_pad_stop_task (flacdec->sinkpad);
- }
-
- /* acquire the stream lock, this either happens when the streaming thread
- * stopped because of the flush or when the task is paused after the loop
- * function finished an iteration, which can never happen when it's blocked
- * downstream in PAUSED, for example */
- GST_PAD_STREAM_LOCK (flacdec->sinkpad);
-
- /* start seek with clear state to avoid seeking thread pushing segments/data.
- * Note current state may have some pending,
- * e.g. multi-sink seek leads to immediate subsequent seek events */
- if (flacdec->start_segment) {
- gst_event_unref (flacdec->start_segment);
- flacdec->start_segment = NULL;
- }
- gst_buffer_replace (&flacdec->pending, NULL);
- flacdec->pending_samples = 0;
-
- /* save a segment copy until we know the seek worked. The idea is that
- * when the seek fails, we want to restore with what we were doing. */
- segment = flacdec->segment;
-
- /* update the segment with the seek values, last_stop will contain the new
- * position we should seek to */
- gst_segment_set_seek (&flacdec->segment, rate, GST_FORMAT_DEFAULT,
- seek_flags, start_type, start, stop_type, stop, &only_update);
-
- GST_DEBUG_OBJECT (flacdec,
- "configured segment: [%" G_GINT64_FORMAT "-%" G_GINT64_FORMAT
- "] = [%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT "]",
- flacdec->segment.start, flacdec->segment.stop,
- GST_TIME_ARGS (flacdec->segment.start * GST_SECOND /
- flacdec->sample_rate),
- GST_TIME_ARGS (flacdec->segment.stop * GST_SECOND /
- flacdec->sample_rate));
-
- GST_DEBUG_OBJECT (flacdec, "performing seek to sample %" G_GINT64_FORMAT,
- flacdec->segment.last_stop);
-
- /* flush sinkpad again because we need to pull and push buffers while doing
- * the seek */
- if (flush) {
- GST_DEBUG_OBJECT (flacdec, "flushing stop");
- gst_pad_push_event (flacdec->sinkpad, gst_event_new_flush_stop ());
- gst_pad_push_event (flacdec->srcpad, gst_event_new_flush_stop ());
- }
-
- /* mark ourselves as seeking because the above lines will trigger some
- * callbacks that need to behave differently when seeking */
- flacdec->seeking = TRUE;
-
- if (!flacdec->eos) {
- GST_LOG_OBJECT (flacdec, "calling seek_absolute");
- seek_ok = FLAC__stream_decoder_seek_absolute (flacdec->decoder,
- flacdec->segment.last_stop);
- GST_LOG_OBJECT (flacdec, "done with seek_absolute, seek_ok=%d", seek_ok);
- } else {
- GST_LOG_OBJECT (flacdec, "not seeking, seeked after end of file");
- seek_ok = TRUE;
- }
-
- flacdec->seeking = FALSE;
-
- GST_DEBUG_OBJECT (flacdec, "performed seek to sample %" G_GINT64_FORMAT,
- flacdec->segment.last_stop);
-
- if (!seek_ok) {
- GST_WARNING_OBJECT (flacdec, "seek failed");
- /* seek failed, restore the segment and start streaming again with
- * the previous segment values */
- flacdec->segment = segment;
- } else if (!flush && flacdec->running) {
- /* we are running the current segment and doing a non-flushing seek,
- * close the segment first based on the last_stop. */
- GST_DEBUG_OBJECT (flacdec, "closing running segment %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, segment.start, segment.last_stop);
-
- /* convert the old segment values to time to close the old segment */
- start = gst_util_uint64_scale_int (segment.start, GST_SECOND,
- flacdec->sample_rate);
- last_stop =
- gst_util_uint64_scale_int (segment.last_stop, GST_SECOND,
- flacdec->sample_rate);
-
- /* queue the segment for sending in the stream thread, start and time are
- * always the same. */
- if (flacdec->close_segment)
- gst_event_unref (flacdec->close_segment);
- flacdec->close_segment =
- gst_event_new_new_segment_full (TRUE,
- segment.rate, segment.applied_rate, GST_FORMAT_TIME,
- start, last_stop, start);
- }
-
- if (seek_ok) {
- /* seek succeeded, flacdec->segment contains the new positions */
- GST_DEBUG_OBJECT (flacdec, "seek successful");
- }
-
- /* convert the (new) segment values to time, we will need them to generate the
- * new segment events. */
- start = gst_util_uint64_scale_int (flacdec->segment.start, GST_SECOND,
- flacdec->sample_rate);
- last_stop = gst_util_uint64_scale_int (flacdec->segment.last_stop, GST_SECOND,
- flacdec->sample_rate);
-
- /* for deriving a stop position for the playback segment from the seek
- * segment, we must take the duration when the stop is not set */
- if (flacdec->segment.stop != -1)
- stop = gst_util_uint64_scale_int (flacdec->segment.stop, GST_SECOND,
- flacdec->sample_rate);
- else
- stop = gst_util_uint64_scale_int (flacdec->segment.duration, GST_SECOND,
- flacdec->sample_rate);
-
- /* notify start of new segment when we were asked to do so. */
- if (flacdec->segment.flags & GST_SEEK_FLAG_SEGMENT) {
- /* last_stop contains the position we start from */
- gst_element_post_message (GST_ELEMENT (flacdec),
- gst_message_new_segment_start (GST_OBJECT (flacdec),
- GST_FORMAT_TIME, last_stop));
- }
-
- /* if the seek was ok or (when it failed) we are flushing, we need to send out
- * a new segment. If we did not flush and the seek failed, we simply do
- * nothing here and continue where we were. */
- if (seek_ok || flush) {
- GST_DEBUG_OBJECT (flacdec, "Creating newsegment from %" GST_TIME_FORMAT
- " to %" GST_TIME_FORMAT, GST_TIME_ARGS (last_stop),
- GST_TIME_ARGS (stop));
- /* now replace the old segment so that we send it in the stream thread the
- * next time it is scheduled. */
- if (flacdec->start_segment)
- gst_event_unref (flacdec->start_segment);
- flacdec->start_segment =
- gst_event_new_new_segment_full (FALSE,
- flacdec->segment.rate, flacdec->segment.applied_rate, GST_FORMAT_TIME,
- last_stop, stop, last_stop);
- }
-
- /* we'll generate a discont on the next buffer */
- flacdec->discont = TRUE;
- /* the task is running again now */
- flacdec->running = TRUE;
- gst_pad_start_task (flacdec->sinkpad,
- (GstTaskFunction) gst_flac_dec_loop, flacdec->sinkpad);
-
- GST_PAD_STREAM_UNLOCK (flacdec->sinkpad);
-
- return seek_ok;
-}
-
-static gboolean
-gst_flac_dec_src_event (GstPad * pad, GstEvent * event)
-{
- gboolean res = TRUE;
- GstFlacDec *flacdec;
-
- flacdec = GST_FLAC_DEC (gst_pad_get_parent (pad));
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_SEEK:{
- GST_DEBUG_OBJECT (flacdec, "received seek event %p", event);
- /* first, see if we're before a demuxer that
- * might handle the seek for us */
- gst_event_ref (event);
- res = gst_pad_event_default (pad, event);
- /* if not, try to handle it ourselves */
- if (!res) {
- GST_DEBUG_OBJECT (flacdec, "default failed, handling ourselves");
- res = gst_flac_dec_handle_seek_event (flacdec, event);
- }
- gst_event_unref (event);
- break;
- }
- default:
- res = gst_pad_event_default (pad, event);
- break;
- }
-
- gst_object_unref (flacdec);
-
- return res;
-}
-
-static gboolean
-gst_flac_dec_sink_activate (GstPad * sinkpad)
-{
- if (gst_pad_check_pull_range (sinkpad))
- return gst_pad_activate_pull (sinkpad, TRUE);
-
- return gst_pad_activate_push (sinkpad, TRUE);
-}
-
-static gboolean
-gst_flac_dec_sink_activate_push (GstPad * sinkpad, gboolean active)
-{
- GstFlacDec *dec = GST_FLAC_DEC (GST_OBJECT_PARENT (sinkpad));
-
- if (active) {
- gst_flac_dec_setup_decoder (dec);
- dec->streaming = TRUE;
- dec->got_headers = FALSE;
- }
- return TRUE;
-}
-
-static gboolean
-gst_flac_dec_sink_activate_pull (GstPad * sinkpad, gboolean active)
-{
- gboolean res;
-
- if (active) {
- GstFlacDec *flacdec;
-
- flacdec = GST_FLAC_DEC (GST_PAD_PARENT (sinkpad));
-
- flacdec->offset = 0;
- gst_flac_dec_setup_decoder (flacdec);
- flacdec->running = TRUE;
- flacdec->streaming = FALSE;
-
- res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_flac_dec_loop,
- sinkpad);
- } else {
- res = gst_pad_stop_task (sinkpad);
- }
- return res;
-}
-
-static GstStateChangeReturn
-gst_flac_dec_change_state (GstElement * element, GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstFlacDec *flacdec = GST_FLAC_DEC (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- flacdec->eos = FALSE;
- flacdec->seeking = FALSE;
- flacdec->channels = 0;
- flacdec->depth = 0;
- flacdec->width = 0;
- flacdec->sample_rate = 0;
- gst_segment_init (&flacdec->segment, GST_FORMAT_DEFAULT);
- break;
- default:
- break;
- }
-
- ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
- if (ret == GST_STATE_CHANGE_FAILURE)
- return ret;
-
- switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_segment_init (&flacdec->segment, GST_FORMAT_UNDEFINED);
- gst_flac_dec_reset_decoders (flacdec);
- break;
- default:
- break;
- }
-
- return ret;
-}
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2011> Tim-Philipp Müller <tim centricular net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
#ifndef __GST_FLAC_DEC_H__
#define __GST_FLAC_DEC_H__
-
#include <gst/gst.h>
-#include <gst/base/gstadapter.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiodecoder.h>
#include <FLAC/all.h>
typedef struct _GstFlacDecClass GstFlacDecClass;
struct _GstFlacDec {
- GstElement element;
-
- /* < private > */
+ GstAudioDecoder audiodecoder;
+ /*< private >*/
FLAC__StreamDecoder *decoder;
GstAdapter *adapter;
- gboolean framed;
- gboolean streaming;
-
- gboolean got_headers; /* if we've parsed the headers (unframed push mode only) */
-
- GstPad *sinkpad;
- GstPad *srcpad;
-
- gboolean init;
- guint64 offset; /* current byte offset of input */
+ gboolean got_headers; /* have we received all the header buffers yet? */
- gboolean seeking; /* set to TRUE while seeking to make sure we
- * don't push any buffers in the write callback
- * until we are actually at the new position */
+ GstFlowReturn last_flow; /* to marshal flow return from finis_frame to
+ * handle_frame via flac callbacks */
- gboolean eos; /* set to TRUE if seeked after the end of file */
-
- GstSegment segment; /* the currently configured segment, in
- * samples/audio frames (DEFAULT format) */
- gboolean running;
- gboolean discont;
- GstBuffer *pending; /* pending buffer, produced in seek */
- guint pending_samples;
- GstEvent *close_segment;
- GstEvent *start_segment;
- GstTagList *tags;
-
- GstFlowReturn pull_flow; /* last flow from pull_range */ /* STREAM_LOCK */
-
- GstFlowReturn last_flow; /* the last flow return received from either
- * gst_pad_push or gst_pad_buffer_alloc */
-
- gint channels;
+ GstAudioInfo info;
+ gint channel_reorder_map[8];
gint depth;
- gint width;
- gint sample_rate;
/* from the stream info, needed for scanning */
guint16 min_blocksize;
guint16 max_blocksize;
-
- gint64 cur_granulepos; /* only used in framed mode (flac-in-ogg) */
};
struct _GstFlacDecClass {
- GstElementClass parent_class;
+ GstAudioDecoderClass audiodecoder;
};
GType gst_flac_dec_get_type (void);
#include <gstflacenc.h>
#include <gst/audio/audio.h>
-#include <gst/audio/multichannel.h>
#include <gst/tag/tag.h>
#include <gst/gsttagsetter.h>
/* Taken from http://flac.sourceforge.net/format.html#frame_header */
static const GstAudioChannelPosition channel_positions[8][8] = {
- {GST_AUDIO_CHANNEL_POSITION_FRONT_MONO},
+ {GST_AUDIO_CHANNEL_POSITION_MONO},
{GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}, {
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT},
/* FIXME: 7/8 channel layouts are not defined in the FLAC specs */
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_REAR_CENTER}, {
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}
};
-#define FLAC_SINK_CAPS \
- "audio/x-raw-int, " \
- "endianness = (int) BYTE_ORDER, " \
- "signed = (boolean) TRUE, " \
- "width = (int) 8, " \
- "depth = (int) 8, " \
- "rate = (int) [ 1, 655350 ], " \
- "channels = (int) [ 1, 8 ]; " \
- "audio/x-raw-int, " \
- "endianness = (int) BYTE_ORDER, " \
- "signed = (boolean) TRUE, " \
- "width = (int) 16, " \
- "depth = (int) { 12, 16 }, " \
- "rate = (int) [ 1, 655350 ], " \
- "channels = (int) [ 1, 8 ]; " \
- "audio/x-raw-int, " \
- "endianness = (int) BYTE_ORDER, " \
- "signed = (boolean) TRUE, " \
- "width = (int) 32, " \
- "depth = (int) { 20, 24 }, " \
- "rate = (int) [ 1, 655350 ], " \
- "channels = (int) [ 1, 8 ]"
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define FORMATS "{ S8LE, S16LE, S24LE, S32LE } "
+#else
+#define FORMATS "{ S8BE, S16BE, S24BE, S32BE } "
+#endif
+
+#define FLAC_SINK_CAPS \
+ "audio/x-raw, " \
+ "format = (string) " FORMATS ", " \
+ "layout = (string) interleaved, " \
+ "rate = (int) [ 1, 655350 ], " \
+ "channels = (int) [ 1, 8 ]"
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_DEBUG_CATEGORY_STATIC (flacenc_debug);
#define GST_CAT_DEFAULT flacenc_debug
-
-#define _do_init(type) \
- G_STMT_START{ \
- static const GInterfaceInfo tag_setter_info = { \
- NULL, \
- NULL, \
- NULL \
- }; \
- g_type_add_interface_static (type, GST_TYPE_TAG_SETTER, \
- &tag_setter_info); \
- }G_STMT_END
-
-GST_BOILERPLATE_FULL (GstFlacEnc, gst_flac_enc, GstAudioEncoder,
- GST_TYPE_AUDIO_ENCODER, _do_init);
+#define gst_flac_enc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstFlacEnc, gst_flac_enc, GST_TYPE_AUDIO_ENCODER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
static gboolean gst_flac_enc_start (GstAudioEncoder * enc);
static gboolean gst_flac_enc_stop (GstAudioEncoder * enc);
GstAudioInfo * info);
static GstFlowReturn gst_flac_enc_handle_frame (GstAudioEncoder * enc,
GstBuffer * in_buf);
-static GstCaps *gst_flac_enc_getcaps (GstAudioEncoder * enc);
+static GstCaps *gst_flac_enc_getcaps (GstAudioEncoder * enc, GstCaps * filter);
static gboolean gst_flac_enc_sink_event (GstAudioEncoder * enc,
GstEvent * event);
}
static void
-gst_flac_enc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
-
- gst_element_class_set_details_simple (element_class, "FLAC audio encoder",
- "Codec/Encoder/Audio",
- "Encodes audio with the FLAC lossless audio encoder",
- "Wim Taymans <wim.taymans@chello.be>");
-
- GST_DEBUG_CATEGORY_INIT (flacenc_debug, "flacenc", 0,
- "Flac encoding element");
-}
-
-static void
gst_flac_enc_class_init (GstFlacEncClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstAudioEncoderClass *base_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
base_class = (GstAudioEncoderClass *) (klass);
+ GST_DEBUG_CATEGORY_INIT (flacenc_debug, "flacenc", 0,
+ "Flac encoding element");
+
gobject_class->set_property = gst_flac_enc_set_property;
gobject_class->get_property = gst_flac_enc_get_property;
gobject_class->finalize = gst_flac_enc_finalize;
- base_class->start = GST_DEBUG_FUNCPTR (gst_flac_enc_start);
- base_class->stop = GST_DEBUG_FUNCPTR (gst_flac_enc_stop);
- base_class->set_format = GST_DEBUG_FUNCPTR (gst_flac_enc_set_format);
- base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_flac_enc_handle_frame);
- base_class->getcaps = GST_DEBUG_FUNCPTR (gst_flac_enc_getcaps);
- base_class->event = GST_DEBUG_FUNCPTR (gst_flac_enc_sink_event);
-
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_QUALITY,
g_param_spec_enum ("quality",
"Quality",
-G_MAXINT, G_MAXINT,
DEFAULT_SEEKPOINTS,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "FLAC audio encoder",
+ "Codec/Encoder/Audio",
+ "Encodes audio with the FLAC lossless audio encoder",
+ "Wim Taymans <wim.taymans@chello.be>");
+
+ base_class->start = GST_DEBUG_FUNCPTR (gst_flac_enc_start);
+ base_class->stop = GST_DEBUG_FUNCPTR (gst_flac_enc_stop);
+ base_class->set_format = GST_DEBUG_FUNCPTR (gst_flac_enc_set_format);
+ base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_flac_enc_handle_frame);
+ base_class->getcaps = GST_DEBUG_FUNCPTR (gst_flac_enc_getcaps);
+ base_class->event = GST_DEBUG_FUNCPTR (gst_flac_enc_sink_event);
}
static void
-gst_flac_enc_init (GstFlacEnc * flacenc, GstFlacEncClass * klass)
+gst_flac_enc_init (GstFlacEnc * flacenc)
{
GstAudioEncoder *enc = GST_AUDIO_ENCODER (flacenc);
flacenc->got_headers = FALSE;
flacenc->last_flow = GST_FLOW_OK;
flacenc->offset = 0;
- flacenc->channels = 0;
- flacenc->depth = 0;
- flacenc->sample_rate = 0;
flacenc->eos = FALSE;
- flacenc->tags = gst_tag_list_new ();
+ flacenc->tags = gst_tag_list_new_empty ();
return TRUE;
}
GstTagList *copy;
gint entries = 1;
gint n_images, n_preview_images;
+ GstAudioInfo *info =
+ gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (flacenc));
g_return_if_fail (flacenc != NULL);
user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (flacenc));
if (n_images + n_preview_images > 0) {
GstBuffer *buffer;
+#if 0
GstCaps *caps;
GstStructure *structure;
GstTagImageType image_type = GST_TAG_IMAGE_TYPE_NONE;
+#endif
gint i;
+ GstMapInfo map;
for (i = 0; i < n_images + n_preview_images; i++) {
if (i < n_images) {
flacenc->meta[entries] =
FLAC__metadata_object_new (FLAC__METADATA_TYPE_PICTURE);
+#if 0
caps = gst_buffer_get_caps (buffer);
structure = gst_caps_get_structure (caps, 0);
image_type = (i < n_images) ? 0x00 : 0x01;
else
image_type = image_type + 2;
+#endif
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
FLAC__metadata_object_picture_set_data (flacenc->meta[entries],
- GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), TRUE);
+ map.data, map.size, TRUE);
+ gst_buffer_unmap (buffer, &map);
+
+#if 0
/* FIXME: There's no way to set the picture type in libFLAC */
flacenc->meta[entries]->data.picture.type = image_type;
FLAC__metadata_object_picture_set_mime_type (flacenc->meta[entries],
(char *) gst_structure_get_name (structure), TRUE);
-
gst_caps_unref (caps);
+#endif
+
gst_buffer_unref (buffer);
entries++;
}
FLAC__metadata_object_seektable_template_append_spaced_points
(flacenc->meta[entries], flacenc->seekpoints, total_samples);
} else {
- samples = -flacenc->seekpoints * flacenc->sample_rate;
+ samples = -flacenc->seekpoints * GST_AUDIO_INFO_RATE (info);
res =
FLAC__metadata_object_seektable_template_append_spaced_points_by_samples
(flacenc->meta[entries], samples, total_samples);
gst_tag_list_free (copy);
}
-static void
-gst_flac_enc_caps_append_structure_with_widths (GstCaps * caps,
- GstStructure * s)
-{
- GstStructure *tmp;
- GValue list = { 0, };
- GValue depth = { 0, };
-
-
- tmp = gst_structure_copy (s);
- gst_structure_set (tmp, "width", G_TYPE_INT, 8, "depth", G_TYPE_INT, 8, NULL);
- gst_caps_append_structure (caps, tmp);
-
- tmp = gst_structure_copy (s);
-
- g_value_init (&depth, G_TYPE_INT);
- g_value_init (&list, GST_TYPE_LIST);
- g_value_set_int (&depth, 12);
- gst_value_list_append_value (&list, &depth);
- g_value_set_int (&depth, 16);
- gst_value_list_append_value (&list, &depth);
-
- gst_structure_set (tmp, "width", G_TYPE_INT, 16, NULL);
- gst_structure_set_value (tmp, "depth", &list);
- gst_caps_append_structure (caps, tmp);
-
- g_value_reset (&list);
-
- tmp = s;
-
- g_value_set_int (&depth, 20);
- gst_value_list_append_value (&list, &depth);
- g_value_set_int (&depth, 24);
- gst_value_list_append_value (&list, &depth);
-
- gst_structure_set (tmp, "width", G_TYPE_INT, 32, NULL);
- gst_structure_set_value (tmp, "depth", &list);
- gst_caps_append_structure (caps, tmp);
-
- g_value_unset (&list);
- g_value_unset (&depth);
-}
-
static GstCaps *
-gst_flac_enc_getcaps (GstAudioEncoder * enc)
+gst_flac_enc_getcaps (GstAudioEncoder * enc, GstCaps * filter)
{
GstCaps *ret = NULL, *caps = NULL;
GstPad *pad;
GST_OBJECT_LOCK (pad);
- if (GST_PAD_CAPS (pad)) {
- ret = gst_caps_ref (GST_PAD_CAPS (pad));
+ if (gst_pad_has_current_caps (pad)) {
+ ret = gst_pad_get_current_caps (pad);
} else {
- gint i, c;
+ gint i;
+ GValue v_arr = { 0, };
+ GValue v = { 0, };
+ GstStructure *s, *s2;
+
+ g_value_init (&v_arr, GST_TYPE_ARRAY);
+ g_value_init (&v, G_TYPE_STRING);
+
+ g_value_set_string (&v, GST_AUDIO_NE (S8));
+ gst_value_array_append_value (&v_arr, &v);
+ g_value_set_string (&v, GST_AUDIO_NE (S16));
+ gst_value_array_append_value (&v_arr, &v);
+ g_value_set_string (&v, GST_AUDIO_NE (S24));
+ gst_value_array_append_value (&v_arr, &v);
+ g_value_set_string (&v, GST_AUDIO_NE (S32));
+ gst_value_array_append_value (&v_arr, &v);
+ g_value_unset (&v);
+
+ s = gst_structure_new_empty ("audio/x-raw");
+ gst_structure_set_value (s, "format", &v_arr);
+ g_value_unset (&v_arr);
+
+ gst_structure_set (s, "layout", G_TYPE_STRING, "interleaved",
+ "rate", GST_TYPE_INT_RANGE, 1, 655350, NULL);
ret = gst_caps_new_empty ();
+ for (i = 1; i <= 8; i++) {
+ s2 = gst_structure_copy (s);
- gst_flac_enc_caps_append_structure_with_widths (ret,
- gst_structure_new ("audio/x-raw-int",
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, TRUE,
- "rate", GST_TYPE_INT_RANGE, 1, 655350,
- "channels", GST_TYPE_INT_RANGE, 1, 2, NULL));
-
- for (i = 3; i <= 8; i++) {
- GValue positions = { 0, };
- GValue pos = { 0, };
- GstStructure *s;
-
- g_value_init (&positions, GST_TYPE_ARRAY);
- g_value_init (&pos, GST_TYPE_AUDIO_CHANNEL_POSITION);
+ if (i == 1) {
+ gst_structure_set (s, "channels", G_TYPE_INT, 1, NULL);
+ } else {
+ guint64 channel_mask;
- for (c = 0; c < i; c++) {
- g_value_set_enum (&pos, channel_positions[i - 1][c]);
- gst_value_array_append_value (&positions, &pos);
+ gst_audio_channel_positions_to_mask (channel_positions[i - 1], i,
+ &channel_mask);
+ gst_structure_set (s, "channels", G_TYPE_INT, 1, "channel-mask",
+ GST_TYPE_BITMASK, channel_mask, NULL);
}
- g_value_unset (&pos);
-
- s = gst_structure_new ("audio/x-raw-int",
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, TRUE,
- "rate", GST_TYPE_INT_RANGE, 1, 655350,
- "channels", G_TYPE_INT, i, NULL);
- gst_structure_set_value (s, "channel-positions", &positions);
- g_value_unset (&positions);
- gst_flac_enc_caps_append_structure_with_widths (ret, s);
+ gst_caps_append_structure (ret, s2);
}
+ gst_structure_free (s);
}
GST_OBJECT_UNLOCK (pad);
}
static guint64
-gst_flac_enc_query_peer_total_samples (GstFlacEnc * flacenc, GstPad * pad)
+gst_flac_enc_peer_query_total_samples (GstFlacEnc * flacenc, GstPad * pad)
{
- GstFormat fmt = GST_FORMAT_DEFAULT;
gint64 duration;
+ GstAudioInfo *info =
+ gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (flacenc));
GST_DEBUG_OBJECT (flacenc, "querying peer for DEFAULT format duration");
- if (gst_pad_query_peer_duration (pad, &fmt, &duration)
- && fmt == GST_FORMAT_DEFAULT && duration != GST_CLOCK_TIME_NONE)
+ if (gst_pad_peer_query_duration (pad, GST_FORMAT_DEFAULT, &duration)
+ && duration != GST_CLOCK_TIME_NONE)
goto done;
- fmt = GST_FORMAT_TIME;
GST_DEBUG_OBJECT (flacenc, "querying peer for TIME format duration");
- if (gst_pad_query_peer_duration (pad, &fmt, &duration) &&
- fmt == GST_FORMAT_TIME && duration != GST_CLOCK_TIME_NONE) {
+ if (gst_pad_peer_query_duration (pad, GST_FORMAT_TIME, &duration)
+ && duration != GST_CLOCK_TIME_NONE) {
GST_DEBUG_OBJECT (flacenc, "peer reported duration %" GST_TIME_FORMAT,
GST_TIME_ARGS (duration));
- duration = GST_CLOCK_TIME_TO_FRAMES (duration, flacenc->sample_rate);
+ duration = GST_CLOCK_TIME_TO_FRAMES (duration, GST_AUDIO_INFO_RATE (info));
goto done;
}
FLAC__STREAM_ENCODER_UNINITIALIZED)
goto encoder_already_initialized;
- flacenc->channels = GST_AUDIO_INFO_CHANNELS (info);
- flacenc->width = GST_AUDIO_INFO_WIDTH (info);
- flacenc->depth = GST_AUDIO_INFO_DEPTH (info);
- flacenc->sample_rate = GST_AUDIO_INFO_RATE (info);
-
caps = gst_caps_new_simple ("audio/x-flac",
- "channels", G_TYPE_INT, flacenc->channels,
- "rate", G_TYPE_INT, flacenc->sample_rate, NULL);
+ "channels", G_TYPE_INT, GST_AUDIO_INFO_CHANNELS (info),
+ "rate", G_TYPE_INT, GST_AUDIO_INFO_RATE (info), NULL);
- if (!gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (enc), caps))
+ if (!gst_audio_encoder_set_output_format (enc, caps))
goto setting_src_caps_failed;
gst_caps_unref (caps);
- total_samples = gst_flac_enc_query_peer_total_samples (flacenc,
+ gst_audio_get_channel_reorder_map (GST_AUDIO_INFO_CHANNELS (info),
+ channel_positions[GST_AUDIO_INFO_CHANNELS (info) - 1], info->position,
+ flacenc->channel_reorder_map);
+
+ total_samples = gst_flac_enc_peer_query_total_samples (flacenc,
GST_AUDIO_ENCODER_SINK_PAD (enc));
- FLAC__stream_encoder_set_bits_per_sample (flacenc->encoder, flacenc->depth);
- FLAC__stream_encoder_set_sample_rate (flacenc->encoder, flacenc->sample_rate);
- FLAC__stream_encoder_set_channels (flacenc->encoder, flacenc->channels);
+ FLAC__stream_encoder_set_bits_per_sample (flacenc->encoder,
+ GST_AUDIO_INFO_WIDTH (info));
+ FLAC__stream_encoder_set_sample_rate (flacenc->encoder,
+ GST_AUDIO_INFO_RATE (info));
+ FLAC__stream_encoder_set_channels (flacenc->encoder,
+ GST_AUDIO_INFO_CHANNELS (info));
if (total_samples != GST_CLOCK_TIME_NONE)
FLAC__stream_encoder_set_total_samples_estimate (flacenc->encoder,
static gboolean
gst_flac_enc_update_quality (GstFlacEnc * flacenc, gint quality)
{
+ GstAudioInfo *info =
+ gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (flacenc));
+
flacenc->quality = quality;
#define DO_UPDATE(name, val, str) \
g_object_freeze_notify (G_OBJECT (flacenc));
- if (flacenc->channels == 2 || flacenc->channels == 0) {
+ if (GST_AUDIO_INFO_CHANNELS (info) == 2
+ || GST_AUDIO_INFO_CHANNELS (info) == 0) {
DO_UPDATE (do_mid_side_stereo, mid_side, "mid_side_stereo");
DO_UPDATE (loose_mid_side_stereo, loose_mid_side, "loose_mid_side");
}
{
GstFlacEnc *flacenc;
GstPad *peerpad;
+ GstSegment seg;
flacenc = GST_FLAC_ENC (client_data);
return FLAC__STREAM_ENCODER_SEEK_STATUS_OK;
if ((peerpad = gst_pad_get_peer (GST_AUDIO_ENCODER_SRC_PAD (flacenc)))) {
- GstEvent *event = gst_event_new_new_segment (TRUE, 1.0, GST_FORMAT_BYTES,
- absolute_byte_offset, GST_BUFFER_OFFSET_NONE, 0);
- gboolean ret = gst_pad_send_event (peerpad, event);
+ GstEvent *event;
+ gboolean ret;
+
+ gst_segment_init (&seg, GST_FORMAT_BYTES);
+ seg.start = absolute_byte_offset;
+ seg.stop = GST_BUFFER_OFFSET_NONE;
+ seg.time = 0;
+ event = gst_event_new_segment (&seg);
+ ret = gst_pad_send_event (peerpad, event);
gst_object_unref (peerpad);
if (ret) {
/* copy buffer to avoid problems with circular refcounts */
buf = gst_buffer_copy (buf);
/* again, for good measure */
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
gst_value_set_buffer (&value, buf);
gst_buffer_unref (buf);
gst_value_array_append_value (array_val, &value);
GstCaps *caps;
GList *l;
GstFlowReturn ret = GST_FLOW_OK;
+ GstAudioInfo *info =
+ gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (enc));
caps = gst_caps_new_simple ("audio/x-flac",
- "channels", G_TYPE_INT, enc->channels,
- "rate", G_TYPE_INT, enc->sample_rate, NULL);
+ "channels", G_TYPE_INT, GST_AUDIO_INFO_CHANNELS (info),
+ "rate", G_TYPE_INT, GST_AUDIO_INFO_RATE (info), NULL);
for (l = enc->headers; l != NULL; l = l->next) {
- const guint8 *data;
- guint size;
+ GstBuffer *buf;
+ GstMapInfo map;
+ guint8 *data;
+ gsize size;
/* mark buffers so oggmux will ignore them if it already muxed the
* header buffers from the streamheaders field in the caps */
- l->data = gst_buffer_make_metadata_writable (GST_BUFFER (l->data));
- GST_BUFFER_FLAG_SET (GST_BUFFER (l->data), GST_BUFFER_FLAG_IN_CAPS);
+ l->data = gst_buffer_make_writable (GST_BUFFER_CAST (l->data));
- data = GST_BUFFER_DATA (GST_BUFFER_CAST (l->data));
- size = GST_BUFFER_SIZE (GST_BUFFER_CAST (l->data));
+ buf = GST_BUFFER_CAST (l->data);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
/* find initial 4-byte marker which we need to skip later on */
if (size == 4 && memcmp (data, "fLaC", 4) == 0) {
- marker = GST_BUFFER_CAST (l->data);
+ marker = buf;
} else if (size > 1 && (data[0] & 0x7f) == HDR_TYPE_STREAMINFO) {
- streaminfo = GST_BUFFER_CAST (l->data);
+ streaminfo = buf;
} else if (size > 1 && (data[0] & 0x7f) == HDR_TYPE_VORBISCOMMENT) {
- vorbiscomment = GST_BUFFER_CAST (l->data);
+ vorbiscomment = buf;
}
+
+ gst_buffer_unmap (buf, &map);
}
if (marker == NULL || streaminfo == NULL || vorbiscomment == NULL) {
{
GstBuffer *buf;
guint16 num;
+ GstMapInfo map;
+ guint8 *bdata;
+ gsize slen;
/* minus one for the marker that is merged with streaminfo here */
num = g_list_length (enc->headers) - 1;
- buf = gst_buffer_new_and_alloc (13 + GST_BUFFER_SIZE (streaminfo));
- GST_BUFFER_DATA (buf)[0] = 0x7f;
- memcpy (GST_BUFFER_DATA (buf) + 1, "FLAC", 4);
- GST_BUFFER_DATA (buf)[5] = 0x01; /* mapping version major */
- GST_BUFFER_DATA (buf)[6] = 0x00; /* mapping version minor */
- GST_BUFFER_DATA (buf)[7] = (num & 0xFF00) >> 8;
- GST_BUFFER_DATA (buf)[8] = (num & 0x00FF) >> 0;
- memcpy (GST_BUFFER_DATA (buf) + 9, "fLaC", 4);
- memcpy (GST_BUFFER_DATA (buf) + 13, GST_BUFFER_DATA (streaminfo),
- GST_BUFFER_SIZE (streaminfo));
+ slen = gst_buffer_get_size (streaminfo);
+ buf = gst_buffer_new_and_alloc (13 + slen);
+
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ bdata = map.data;
+ bdata[0] = 0x7f;
+ memcpy (bdata + 1, "FLAC", 4);
+ bdata[5] = 0x01; /* mapping version major */
+ bdata[6] = 0x00; /* mapping version minor */
+ bdata[7] = (num & 0xFF00) >> 8;
+ bdata[8] = (num & 0x00FF) >> 0;
+ memcpy (bdata + 9, "fLaC", 4);
+ gst_buffer_extract (streaminfo, 0, bdata + 13, slen);
+ gst_buffer_unmap (buf, &map);
+
notgst_value_array_append_buffer (&array, buf);
gst_buffer_unref (buf);
}
/* add other headers, if there are any */
for (l = enc->headers; l != NULL; l = l->next) {
- if (GST_BUFFER_CAST (l->data) != marker &&
- GST_BUFFER_CAST (l->data) != streaminfo &&
- GST_BUFFER_CAST (l->data) != vorbiscomment) {
- notgst_value_array_append_buffer (&array, GST_BUFFER_CAST (l->data));
+ GstBuffer *buf = GST_BUFFER_CAST (l->data);
+
+ if (buf != marker && buf != streaminfo && buf != vorbiscomment) {
+ notgst_value_array_append_buffer (&array, buf);
}
}
GstBuffer *buf;
buf = GST_BUFFER (l->data);
- gst_buffer_set_caps (buf, caps);
- GST_LOG_OBJECT (enc, "Pushing header buffer, size %u bytes",
- GST_BUFFER_SIZE (buf));
+ GST_LOG_OBJECT (enc,
+ "Pushing header buffer, size %" G_GSIZE_FORMAT " bytes",
+ gst_buffer_get_size (buf));
+#if 0
GST_MEMDUMP_OBJECT (enc, "header buffer", GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
+#endif
ret = gst_pad_push (GST_AUDIO_ENCODER_SRC_PAD (enc), buf);
l->data = NULL;
}
return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
outbuf = gst_buffer_new_and_alloc (bytes);
- memcpy (GST_BUFFER_DATA (outbuf), buffer, bytes);
+ gst_buffer_fill (outbuf, 0, buffer, bytes);
/* we assume libflac passes us stuff neatly framed */
if (!flacenc->got_headers) {
/* header fixup, push downstream directly */
GST_DEBUG_OBJECT (flacenc, "Fixing up headers at pos=%" G_GUINT64_FORMAT
", size=%u", flacenc->offset, (guint) bytes);
+#if 0
GST_MEMDUMP_OBJECT (flacenc, "Presumed header fragment",
GST_BUFFER_DATA (outbuf), GST_BUFFER_SIZE (outbuf));
- gst_buffer_set_caps (outbuf,
- GST_PAD_CAPS (GST_AUDIO_ENCODER_SRC_PAD (flacenc)));
+#endif
ret = gst_pad_push (GST_AUDIO_ENCODER_SRC_PAD (flacenc), outbuf);
} else {
/* regular frame data, pass to base class */
GST_DEBUG ("Received %s event on sinkpad", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:{
- GstFormat format;
+ case GST_EVENT_SEGMENT:{
+ GstSegment seg;
gint64 start, stream_time;
if (flacenc->offset == 0) {
- gst_event_parse_new_segment (event, NULL, NULL, &format, &start, NULL,
- &stream_time);
+ gst_event_copy_segment (event, &seg);
+ start = seg.start;
+ stream_time = seg.time;
} else {
start = -1;
stream_time = -1;
else
GST_DEBUG ("Not handling newsegment event with non-zero start");
} else {
- GstEvent *e = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- 0, -1, 0);
+ GstEvent *e;
+ gst_segment_init (&seg, GST_FORMAT_BYTES);
+ e = gst_event_new_segment (&seg);
ret = gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (enc), e);
}
return ret;
}
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define READ_INT24 GST_READ_UINT24_LE
+#else
+#define READ_INT24 GST_READ_UINT24_BE
+#endif
+
static GstFlowReturn
gst_flac_enc_handle_frame (GstAudioEncoder * enc, GstBuffer * buffer)
{
GstFlacEnc *flacenc;
FLAC__int32 *data;
- gulong insize;
- gint samples, width;
+ gint samples, width, channels;
gulong i;
+ gint j;
FLAC__bool res;
+ GstMapInfo map;
+ GstAudioInfo *info =
+ gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (enc));
+ gint *reorder_map;
flacenc = GST_FLAC_ENC (enc);
/* base class ensures configuration */
- g_return_val_if_fail (flacenc->depth != 0, GST_FLOW_NOT_NEGOTIATED);
+ g_return_val_if_fail (GST_AUDIO_INFO_WIDTH (info) != 0,
+ GST_FLOW_NOT_NEGOTIATED);
- width = flacenc->width;
+ width = GST_AUDIO_INFO_WIDTH (info);
+ channels = GST_AUDIO_INFO_CHANNELS (info);
+ reorder_map = flacenc->channel_reorder_map;
if (G_UNLIKELY (!buffer)) {
if (flacenc->eos) {
return flacenc->last_flow;
}
- insize = GST_BUFFER_SIZE (buffer);
- samples = insize / (width >> 3);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ samples = map.size / (width >> 3);
data = g_malloc (samples * sizeof (FLAC__int32));
+ samples /= channels;
if (width == 8) {
- gint8 *indata = (gint8 *) GST_BUFFER_DATA (buffer);
+ gint8 *indata = (gint8 *) map.data;
for (i = 0; i < samples; i++)
- data[i] = (FLAC__int32) indata[i];
+ for (j = 0; j < channels; j++)
+ data[i * channels + reorder_map[j]] =
+ (FLAC__int32) indata[i * channels + j];
} else if (width == 16) {
- gint16 *indata = (gint16 *) GST_BUFFER_DATA (buffer);
+ gint16 *indata = (gint16 *) map.data;
for (i = 0; i < samples; i++)
- data[i] = (FLAC__int32) indata[i];
+ for (j = 0; j < channels; j++)
+ data[i * channels + reorder_map[j]] =
+ (FLAC__int32) indata[i * channels + j];
+ } else if (width == 24) {
+ guint8 *indata = (guint8 *) map.data;
+ guint32 val;
+
+ for (i = 0; i < samples; i++)
+ for (j = 0; j < channels; j++) {
+ val = READ_INT24 (&indata[3 * (i * channels + j)]);
+ if (val & 0x00800000)
+ val |= 0xff000000;
+ data[i * channels + reorder_map[j]] = (FLAC__int32) val;
+ }
} else if (width == 32) {
- gint32 *indata = (gint32 *) GST_BUFFER_DATA (buffer);
+ gint32 *indata = (gint32 *) map.data;
for (i = 0; i < samples; i++)
- data[i] = (FLAC__int32) indata[i];
+ for (j = 0; j < channels; j++)
+ data[i * channels + reorder_map[j]] =
+ (FLAC__int32) indata[i * channels + j];
} else {
g_assert_not_reached ();
}
+ gst_buffer_unmap (buffer, &map);
res = FLAC__stream_encoder_process_interleaved (flacenc->encoder,
- (const FLAC__int32 *) data, samples / flacenc->channels);
+ (const FLAC__int32 *) data, samples / channels);
g_free (data);
* fails for some reason */
guint64 offset;
- gint channels;
- gint width;
- gint depth;
- gint sample_rate;
gint quality;
gboolean stopped;
guint padding;
/* queue headers until we have them all so we can add streamheaders to caps */
gboolean got_headers;
GList *headers;
+
+ gint channel_reorder_map[8];
};
struct _GstFlacEncClass {
static void gst_flac_tag_dispose (GObject * object);
-static GstFlowReturn gst_flac_tag_chain (GstPad * pad, GstBuffer * buffer);
+static GstFlowReturn gst_flac_tag_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
static GstStateChangeReturn gst_flac_tag_change_state (GstElement * element,
GstStateChange transition);
-static gboolean gst_flac_tag_sink_setcaps (GstPad * pad, GstCaps * caps);
+static gboolean gst_flac_tag_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
-static void
-gst_flac_tag_setup_interfaces (GType flac_tag_type)
-{
- static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };
-
- g_type_add_interface_static (flac_tag_type, GST_TYPE_TAG_SETTER,
- &tag_setter_info);
-}
+#define gst_flac_tag_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstFlacTag, gst_flac_tag, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
-GST_BOILERPLATE_FULL (GstFlacTag, gst_flac_tag, GstElement, GST_TYPE_ELEMENT,
- gst_flac_tag_setup_interfaces);
-
-static void
-gst_flac_tag_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "FLAC tagger",
- "Formatter/Metadata",
- "Rewrite tags in a FLAC file", "Christophe Fergeau <teuf@gnome.org>");
-
- gst_element_class_add_static_pad_template (element_class,
- &flac_tag_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &flac_tag_src_template);
-
- GST_DEBUG_CATEGORY_INIT (flactag_debug, "flactag", 0, "flac tag rewriter");
-}
static void
gst_flac_tag_class_init (GstFlacTagClass * klass)
GstElementClass *gstelement_class;
GObjectClass *gobject_class;
+ GST_DEBUG_CATEGORY_INIT (flactag_debug, "flactag", 0, "flac tag rewriter");
+
gstelement_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
-
gobject_class->dispose = gst_flac_tag_dispose;
gstelement_class->change_state = gst_flac_tag_change_state;
+
+ gst_element_class_set_details_simple (gstelement_class, "FLAC tagger",
+ "Formatter/Metadata",
+ "Rewrite tags in a FLAC file", "Christophe Fergeau <teuf@gnome.org>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_tag_sink_template));
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_tag_src_template));
}
static void
static void
-gst_flac_tag_init (GstFlacTag * tag, GstFlacTagClass * klass)
+gst_flac_tag_init (GstFlacTag * tag)
{
/* create the sink and src pads */
tag->sinkpad =
gst_pad_new_from_static_template (&flac_tag_sink_template, "sink");
gst_pad_set_chain_function (tag->sinkpad,
GST_DEBUG_FUNCPTR (gst_flac_tag_chain));
- gst_pad_set_setcaps_function (tag->sinkpad,
- GST_DEBUG_FUNCPTR (gst_flac_tag_sink_setcaps));
+ gst_pad_set_event_function (tag->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flac_tag_sink_event));
gst_element_add_pad (GST_ELEMENT (tag), tag->sinkpad);
tag->srcpad =
}
static gboolean
-gst_flac_tag_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_flac_tag_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
- GstFlacTag *tag = GST_FLAC_TAG (GST_PAD_PARENT (pad));
+ gboolean ret;
+
+ switch (GST_EVENT_TYPE (event)) {
+ default:
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ }
- return gst_pad_set_caps (tag->srcpad, caps);
+ return ret;
}
#define FLAC_MAGIC "fLaC"
#define FLAC_MAGIC_SIZE (sizeof (FLAC_MAGIC) - 1)
static GstFlowReturn
-gst_flac_tag_chain (GstPad * pad, GstBuffer * buffer)
+gst_flac_tag_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstFlacTag *tag;
GstFlowReturn ret;
+ GstMapInfo map;
+ gsize size;
ret = GST_FLOW_OK;
- tag = GST_FLAC_TAG (gst_pad_get_parent (pad));
+ tag = GST_FLAC_TAG (parent);
gst_adapter_push (tag->adapter, buffer);
id_buffer = gst_adapter_take_buffer (tag->adapter, FLAC_MAGIC_SIZE);
GST_DEBUG_OBJECT (tag, "looking for " FLAC_MAGIC " identifier");
- if (memcmp (GST_BUFFER_DATA (id_buffer), FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) {
+ if (gst_buffer_memcmp (id_buffer, 0, FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) {
GST_DEBUG_OBJECT (tag, "pushing " FLAC_MAGIC " identifier buffer");
- gst_buffer_set_caps (id_buffer, GST_PAD_CAPS (tag->srcpad));
ret = gst_pad_push (tag->srcpad, id_buffer);
if (ret != GST_FLOW_OK)
goto cleanup;
* of a metadata block
*/
if (tag->state == GST_FLAC_TAG_STATE_METADATA_BLOCKS) {
- guint size;
guint type;
gboolean is_last;
const guint8 *block_header;
if (gst_adapter_available (tag->adapter) < 4)
goto cleanup;
- block_header = gst_adapter_peek (tag->adapter, 4);
+ block_header = gst_adapter_map (tag->adapter, 4);
is_last = ((block_header[0] & 0x80) == 0x80);
type = block_header[0] & 0x7F;
size = (block_header[1] << 16)
| (block_header[2] << 8)
| block_header[3];
+ gst_adapter_unmap (tag->adapter);
/* The 4 bytes long header isn't included in the metadata size */
tag->metadata_block_size = size + 4;
tag->metadata_last_block = is_last;
GST_DEBUG_OBJECT (tag,
- "got metadata block: %d bytes, type %d, is vorbiscomment: %d, is last: %d",
+ "got metadata block: %" G_GSIZE_FORMAT " bytes, type %d, "
+ "is vorbiscomment: %d, is last: %d",
size, type, (type == 0x04), is_last);
/* Metadata blocks of type 4 are vorbis comment blocks */
/* clear the is-last flag, as the last metadata block will
* be the vorbis comment block which we will build ourselves.
*/
- GST_BUFFER_DATA (metadata_buffer)[0] &= (~0x80);
+ gst_buffer_map (metadata_buffer, &map, GST_MAP_READWRITE);
+ map.data[0] &= (~0x80);
+ gst_buffer_unmap (metadata_buffer, &map);
if (tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) {
GST_DEBUG_OBJECT (tag, "pushing metadata block buffer");
- gst_buffer_set_caps (metadata_buffer, GST_PAD_CAPS (tag->srcpad));
ret = gst_pad_push (tag->srcpad, metadata_buffer);
if (ret != GST_FLOW_OK)
goto cleanup;
* block, and stop now if the user only wants to read tags
*/
if (tag->vorbiscomment != NULL) {
+ guint8 id_data[4];
/* We found some tags, try to parse them and notify the other elements
* that we encountered some tags
*/
GST_DEBUG_OBJECT (tag, "emitting vorbiscomment tags");
+ gst_buffer_extract (tag->vorbiscomment, 0, id_data, 4);
tag->tags = gst_tag_list_from_vorbiscomment_buffer (tag->vorbiscomment,
- GST_BUFFER_DATA (tag->vorbiscomment), 4, NULL);
+ id_data, 4, NULL);
if (tag->tags != NULL) {
- gst_element_found_tags (GST_ELEMENT (tag),
- gst_tag_list_copy (tag->tags));
+ gst_pad_push_event (tag->srcpad,
+ gst_event_new_tag (gst_tag_list_copy (tag->tags)));
}
gst_buffer_unref (tag->vorbiscomment);
*/
if (tag->state == GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT) {
GstBuffer *buffer;
- gint size;
const GstTagList *user_tags;
GstTagList *merged_tags;
*/
GST_WARNING_OBJECT (tag, "No tags found");
buffer = gst_buffer_new_and_alloc (12);
- if (buffer == NULL) {
- GST_ELEMENT_ERROR (tag, CORE, TOO_LAZY, (NULL),
- ("Error creating 12-byte buffer for padding block"));
- ret = GST_FLOW_ERROR;
- goto cleanup;
- }
- memset (GST_BUFFER_DATA (buffer), 0, GST_BUFFER_SIZE (buffer));
- GST_BUFFER_DATA (buffer)[0] = 0x81; /* 0x80 = Last metadata block,
- * 0x01 = padding block
- */
+ if (buffer == NULL)
+ goto no_buffer;
+
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ memset (map.data, 0, map.size);
+ map.data[0] = 0x81; /* 0x80 = Last metadata block,
+ * 0x01 = padding block */
+ gst_buffer_unmap (buffer, &map);
} else {
guchar header[4];
+ guint8 fbit[1];
memset (header, 0, sizeof (header));
header[0] = 0x84; /* 0x80 = Last metadata block,
- * 0x04 = vorbiscomment block
- */
+ * 0x04 = vorbiscomment block */
buffer = gst_tag_list_to_vorbiscomment_buffer (merged_tags, header,
sizeof (header), NULL);
GST_DEBUG_OBJECT (tag, "Writing tags %" GST_PTR_FORMAT, merged_tags);
gst_tag_list_free (merged_tags);
- if (buffer == NULL) {
- GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
- ("Error converting tag list to vorbiscomment buffer"));
- ret = GST_FLOW_ERROR;
- goto cleanup;
- }
- size = GST_BUFFER_SIZE (buffer) - 4;
- if ((size > 0xFFFFFF) || (size < 0)) {
- /* FLAC vorbis comment blocks are limited to 2^24 bytes,
- * while the vorbis specs allow more than that. Shouldn't
- * be a real world problem though
- */
- GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
- ("Vorbis comment of size %d too long", size));
- ret = GST_FLOW_ERROR;
- goto cleanup;
- }
+ if (buffer == NULL)
+ goto no_comment;
+ size = gst_buffer_get_size (buffer);
+ if ((size < 4) || ((size - 4) > 0xFFFFFF))
+ goto comment_too_long;
+
+ fbit[0] = 1;
/* Get rid of the framing bit at the end of the vorbiscomment buffer
* if it exists since libFLAC seems to lose sync because of this
* bit in gstflacdec
*/
- if (GST_BUFFER_DATA (buffer)[GST_BUFFER_SIZE (buffer) - 1] == 1) {
- GstBuffer *sub;
-
- sub = gst_buffer_create_sub (buffer, 0, GST_BUFFER_SIZE (buffer) - 1);
- gst_buffer_unref (buffer);
- buffer = sub;
+ if (gst_buffer_memcmp (buffer, size - 1, fbit, 1) == 0) {
+ buffer = gst_buffer_make_writable (buffer);
+ gst_buffer_resize (buffer, 0, size - 1);
}
}
/* The 4 byte metadata block header isn't accounted for in the total
* size of the metadata block
*/
- size = GST_BUFFER_SIZE (buffer) - 4;
-
- GST_BUFFER_DATA (buffer)[1] = ((size & 0xFF0000) >> 16);
- GST_BUFFER_DATA (buffer)[2] = ((size & 0x00FF00) >> 8);
- GST_BUFFER_DATA (buffer)[3] = (size & 0x0000FF);
- GST_DEBUG_OBJECT (tag, "pushing %d byte vorbiscomment buffer",
- GST_BUFFER_SIZE (buffer));
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (tag->srcpad));
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ map.data[1] = (((map.size - 4) & 0xFF0000) >> 16);
+ map.data[2] = (((map.size - 4) & 0x00FF00) >> 8);
+ map.data[3] = ((map.size - 4) & 0x0000FF);
+ gst_buffer_unmap (buffer, &map);
+
+ GST_DEBUG_OBJECT (tag, "pushing %" G_GSIZE_FORMAT " byte vorbiscomment "
+ "buffer", map.size);
+
ret = gst_pad_push (tag->srcpad, buffer);
if (ret != GST_FLOW_OK) {
goto cleanup;
avail = gst_adapter_available (tag->adapter);
if (avail > 0) {
buffer = gst_adapter_take_buffer (tag->adapter, avail);
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (tag->srcpad));
ret = gst_pad_push (tag->srcpad, buffer);
}
}
cleanup:
- gst_object_unref (tag);
return ret;
-}
+ /* ERRORS */
+no_buffer:
+ {
+ GST_ELEMENT_ERROR (tag, CORE, TOO_LAZY, (NULL),
+ ("Error creating 12-byte buffer for padding block"));
+ ret = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+no_comment:
+ {
+ GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
+ ("Error converting tag list to vorbiscomment buffer"));
+ ret = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+comment_too_long:
+ {
+ /* FLAC vorbis comment blocks are limited to 2^24 bytes,
+ * while the vorbis specs allow more than that. Shouldn't
+ * be a real world problem though
+ */
+ GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
+ ("Vorbis comment of size %" G_GSIZE_FORMAT " too long", size));
+ ret = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+}
static GstStateChangeReturn
gst_flac_tag_change_state (GstElement * element, GstStateChange transition)
break;
}
- return parent_class->change_state (element, transition);
+ return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
}
+++ /dev/null
-plugin_LTLIBRARIES = libgstgconfelements.la
-
-libgstgconfelements_la_SOURCES = \
- gstgconfaudiosink.c \
- gstgconfaudiosrc.c \
- gstgconfelements.c \
- gstgconfvideosink.c \
- gstgconfvideosrc.c \
- gstswitchsink.c \
- gstswitchsrc.c \
- gstgconf.c
-
-DIR_CFLAGS = -DGST_GCONF_DIR=\"/system/gstreamer/@GST_MAJORMINOR@\"
-libgstgconfelements_la_CFLAGS = $(GST_CFLAGS) $(GCONF_CFLAGS) $(DIR_CFLAGS)
-libgstgconfelements_la_LIBADD = $(GST_LIBS) $(GCONF_LIBS)
-libgstgconfelements_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
-libgstgconfelements_la_LIBTOOLFLAGS = --tag=disable-static
-
-noinst_HEADERS = \
- gstgconfaudiosink.h \
- gstgconfaudiosrc.h \
- gstgconfelements.h \
- gstgconfvideosink.h \
- gstgconfvideosrc.h \
- gstswitchsink.h \
- gstswitchsrc.h \
- gstgconf.h
+++ /dev/null
-/* GStreamer
- * nf_get_default_audio_sink
- * Copyright (C) <2002> Thomas Vander Stichele <thomas@apestaart.org>
- * Copyright (C) <2006> Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-/*
- * this library handles interaction with GConf
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <gst/gst.h>
-
-#include "gstgconf.h"
-#include "gstgconfelements.h" /* for debug category */
-
-#ifndef GST_GCONF_DIR
-#error "GST_GCONF_DIR is not defined !"
-#endif
-
-static GConfClient *_gst_gconf_client = NULL; /* GConf connection */
-
-
-/* internal functions */
-
-static GConfClient *
-gst_gconf_get_client (void)
-{
- if (!_gst_gconf_client)
- _gst_gconf_client = gconf_client_get_default ();
-
- return _gst_gconf_client;
-}
-
-/* external functions */
-
-/**
- * gst_gconf_get_string:
- * @key: a #gchar corresponding to the key you want to get.
- *
- * Get GConf key @key's string value.
- *
- * Returns: a newly allocated #gchar string containing @key's value,
- * or NULL in the case of an error..
- */
-gchar *
-gst_gconf_get_string (const gchar * key)
-{
- GError *error = NULL;
- gchar *value = NULL;
- gchar *full_key;
-
- if (!g_str_has_prefix (key, GST_GCONF_DIR))
- full_key = g_strdup_printf ("%s/%s", GST_GCONF_DIR, key);
- else
- full_key = g_strdup (key);
-
- value = gconf_client_get_string (gst_gconf_get_client (), full_key, &error);
- g_free (full_key);
-
- if (error) {
- g_warning ("gst_gconf_get_string: error: %s\n", error->message);
- g_error_free (error);
- return NULL;
- }
-
- return value;
-}
-
-const gchar *
-gst_gconf_get_key_for_sink_profile (GstGConfProfile profile)
-{
- switch (profile) {
- case GCONF_PROFILE_SOUNDS:
- return GST_GCONF_DIR "/" GST_GCONF_AUDIOSINK_KEY;
- case GCONF_PROFILE_MUSIC:
- return GST_GCONF_DIR "/" GST_GCONF_MUSIC_AUDIOSINK_KEY;
- case GCONF_PROFILE_CHAT:
- return GST_GCONF_DIR "/" GST_GCONF_CHAT_AUDIOSINK_KEY;
- default:
- break;
- }
-
- g_return_val_if_reached (GST_GCONF_DIR "/" GST_GCONF_AUDIOSINK_KEY);
-}
-
-/**
- * gst_gconf_set_string:
- * @key: a #gchar corresponding to the key you want to set.
- * @value: a #gchar containing key value.
- *
- * Set GConf key @key to string value @value.
- */
-void
-gst_gconf_set_string (const gchar * key, const gchar * value)
-{
- GError *error = NULL;
- gchar *full_key;
-
- if (!g_str_has_prefix (key, GST_GCONF_DIR))
- full_key = g_strdup_printf ("%s/%s", GST_GCONF_DIR, key);
- else
- full_key = g_strdup (key);
-
- gconf_client_set_string (gst_gconf_get_client (), full_key, value, &error);
- if (error) {
- GST_ERROR ("gst_gconf_set_string: error: %s\n", error->message);
- g_error_free (error);
- }
- g_free (full_key);
-}
-
-/**
- * gst_gconf_render_bin_from_key:
- * @key: a #gchar string corresponding to a GConf key.
- *
- * Render bin from GConf key @key.
- *
- * Returns: a #GstElement containing the rendered bin.
- */
-GstElement *
-gst_gconf_render_bin_from_key (const gchar * key)
-{
- GstElement *bin = NULL;
- gchar *value;
-
- value = gst_gconf_get_string (key);
-
- GST_LOG ("%s = %s", GST_STR_NULL (key), GST_STR_NULL (value));
-
- if (value) {
- GError *err = NULL;
-
- bin = gst_parse_bin_from_description (value, TRUE, &err);
- if (err) {
- GST_ERROR ("gconf: error creating bin '%s': %s", value, err->message);
- g_error_free (err);
- }
-
- g_free (value);
- }
- return bin;
-}
-
-/**
- * gst_gconf_render_bin_with_default:
- * @bin: a #gchar string describing the pipeline to construct.
- * @default_sink: an element to use as default if the given pipeline fails to construct.
- *
- * Render bin from description @bin using @default_sink element as a fallback.
- *
- * Returns: a #GstElement containing the rendered bin.
- */
-GstElement *
-gst_gconf_render_bin_with_default (const gchar * bin,
- const gchar * default_sink)
-{
- GstElement *ret = NULL;
- GError *err = NULL;
-
- if (bin != NULL)
- ret = gst_parse_bin_from_description (bin, TRUE, &err);
-
- if (ret == NULL || err != NULL) {
- if (err) {
- GST_DEBUG ("Could not create audio sink from GConf settings: %s",
- err->message);
- g_error_free (err);
- } else {
- GST_DEBUG ("Could not create audio sink from GConf settings");
- }
-
- ret = gst_element_factory_make (default_sink, NULL);
-
- if (!ret)
- g_warning
- ("Could not build GConf audio sink and the replacement %s doesn't work",
- DEFAULT_AUDIOSINK);
- }
-
- return ret;
-}
-
-/**
- * gst_gconf_get_default_video_sink:
- *
- * Render video output bin from GStreamer GConf key : "default/videosink".
- * If key is invalid, the default video sink for the platform is used
- * (typically xvimagesink or ximagesink).
- *
- * Returns: a #GstElement containing the video output bin, or NULL if
- * everything failed.
- */
-GstElement *
-gst_gconf_get_default_video_sink (void)
-{
- GstElement *ret = gst_gconf_render_bin_from_key (GST_GCONF_VIDEOSINK_KEY);
-
- if (!ret) {
- ret = gst_element_factory_make (DEFAULT_VIDEOSINK, NULL);
-
- if (!ret)
- g_warning ("No GConf default video sink key and %s doesn't work",
- DEFAULT_VIDEOSINK);
- }
-
- return ret;
-}
-
-/**
- * gst_gconf_get_default_audio_src:
- *
- * Render audio acquisition bin from GStreamer GConf key : "default/audiosrc".
- * If key is invalid, the default audio source for the plaform is used.
- * (typically osssrc or sunaudiosrc).
- *
- * Returns: a #GstElement containing the audio source bin, or NULL if
- * everything failed.
- */
-GstElement *
-gst_gconf_get_default_audio_src (void)
-{
- GstElement *ret = gst_gconf_render_bin_from_key (GST_GCONF_AUDIOSRC_KEY);
-
- if (!ret) {
- ret = gst_element_factory_make (DEFAULT_AUDIOSRC, NULL);
-
- if (!ret)
- g_warning ("No GConf default audio src key and %s doesn't work",
- DEFAULT_AUDIOSRC);
- }
-
- return ret;
-}
-
-/**
- * gst_gconf_get_default_video_src:
- *
- * Render video acquisition bin from GStreamer GConf key :
- * "default/videosrc". If key is invalid, the default video source
- * for the platform is used (typically videotestsrc).
- *
- * Returns: a #GstElement containing the video source bin, or NULL if
- * everything failed.
- */
-GstElement *
-gst_gconf_get_default_video_src (void)
-{
- GstElement *ret = gst_gconf_render_bin_from_key (GST_GCONF_VIDEOSRC_KEY);
-
- if (!ret) {
- ret = gst_element_factory_make (DEFAULT_VIDEOSRC, NULL);
-
- if (!ret)
- g_warning ("No GConf default video src key and %s doesn't work",
- DEFAULT_VIDEOSRC);
- }
-
- return ret;
-}
-
-/**
- * gst_gconf_get_default_visualization_element:
- *
- * Render visualization bin from GStreamer GConf key : "default/visualization".
- * If key is invalid, the default visualization element is used.
- *
- * Returns: a #GstElement containing the visualization bin, or NULL if
- * everything failed.
- */
-GstElement *
-gst_gconf_get_default_visualization_element (void)
-{
- GstElement *ret = gst_gconf_render_bin_from_key ("default/visualization");
-
- if (!ret) {
- ret = gst_element_factory_make (DEFAULT_VISUALIZER, NULL);
-
- if (!ret)
- g_warning
- ("No GConf default visualization plugin key and %s doesn't work",
- DEFAULT_VISUALIZER);
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2002> Thomas Vander Stichele <thomas@apestaart.org>
- * Copyright (C) <2006> Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef GST_GCONF_H
-#define GST_GCONF_H
-
-/*
- * this library handles interaction with GConf
- */
-
-#include <gst/gst.h>
-#include <gconf/gconf-client.h>
-
-G_BEGIN_DECLS
-
-#define GST_GCONF_AUDIOSRC_KEY "default/audiosrc"
-#define GST_GCONF_AUDIOSINK_KEY "default/audiosink"
-#define GST_GCONF_MUSIC_AUDIOSINK_KEY "default/musicaudiosink"
-#define GST_GCONF_CHAT_AUDIOSINK_KEY "default/chataudiosink"
-#define GST_GCONF_VIDEOSRC_KEY "default/videosrc"
-#define GST_GCONF_VIDEOSINK_KEY "default/videosink"
-
-typedef enum
-{
- GCONF_PROFILE_SOUNDS,
- GCONF_PROFILE_MUSIC,
- GCONF_PROFILE_CHAT,
- GCONF_PROFILE_NONE /* Internal value only */
-} GstGConfProfile;
-
-gchar * gst_gconf_get_string (const gchar *key);
-void gst_gconf_set_string (const gchar *key,
- const gchar *value);
-
-const gchar * gst_gconf_get_key_for_sink_profile (GstGConfProfile profile);
-
-GstElement * gst_gconf_render_bin_from_key (const gchar *key);
-GstElement * gst_gconf_render_bin_with_default (const gchar *bin,
- const gchar *default_sink);
-
-GstElement * gst_gconf_get_default_video_sink (void);
-GstElement * gst_gconf_get_default_audio_sink (int profile);
-GstElement * gst_gconf_get_default_video_src (void);
-GstElement * gst_gconf_get_default_audio_src (void);
-GstElement * gst_gconf_get_default_visualization_element (void);
-
-G_END_DECLS
-
-#endif /* GST_GCONF_H */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2006 Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/**
- * SECTION:element-gconfaudiosink
- *
- * This element outputs sound to the audiosink that has been configured in
- * GConf by the user.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch filesrc location=foo.ogg ! decodebin ! audioconvert ! audioresample ! gconfaudiosink
- * ]| Play on configured audiosink
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstgconfelements.h"
-#include "gstgconfaudiosink.h"
-
-static void gst_gconf_audio_sink_dispose (GObject * object);
-static void gst_gconf_audio_sink_finalize (GstGConfAudioSink * sink);
-static void cb_change_child (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data);
-static GstStateChangeReturn
-gst_gconf_audio_sink_change_state (GstElement * element,
- GstStateChange transition);
-static void gst_gconf_switch_profile (GstGConfAudioSink * sink,
- GstGConfProfile profile);
-
-enum
-{
- PROP_0,
- PROP_PROFILE
-};
-
-GST_BOILERPLATE (GstGConfAudioSink, gst_gconf_audio_sink, GstSwitchSink,
- GST_TYPE_SWITCH_SINK);
-
-static void gst_gconf_audio_sink_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_gconf_audio_sink_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-static void
-gst_gconf_audio_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (eklass, "GConf audio sink",
- "Sink/Audio",
- "Audio sink embedding the GConf-settings for audio output",
- "Jan Schmidt <thaytan@mad.scientist.com>");
-}
-
-#define GST_TYPE_GCONF_PROFILE (gst_gconf_profile_get_type())
-static GType
-gst_gconf_profile_get_type (void)
-{
- static GType gconf_profile_type = 0;
- static const GEnumValue gconf_profiles[] = {
- {GCONF_PROFILE_SOUNDS, "Sound Events", "sounds"},
- {GCONF_PROFILE_MUSIC, "Music and Movies", "music"},
- {GCONF_PROFILE_CHAT, "Audio/Video Conferencing", "chat"},
- {0, NULL, NULL}
- };
-
- if (!gconf_profile_type) {
- gconf_profile_type =
- g_enum_register_static ("GstGConfProfile", gconf_profiles);
- }
- return gconf_profile_type;
-}
-
-static void
-gst_gconf_audio_sink_class_init (GstGConfAudioSinkClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- oklass->set_property = gst_gconf_audio_sink_set_property;
- oklass->get_property = gst_gconf_audio_sink_get_property;
- oklass->dispose = gst_gconf_audio_sink_dispose;
- oklass->finalize = (GObjectFinalizeFunc) gst_gconf_audio_sink_finalize;
- eklass->change_state = gst_gconf_audio_sink_change_state;
-
- g_object_class_install_property (oklass, PROP_PROFILE,
- g_param_spec_enum ("profile", "Profile", "Profile",
- GST_TYPE_GCONF_PROFILE, GCONF_PROFILE_SOUNDS,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-}
-
-static void
-gst_gconf_audio_sink_reset (GstGConfAudioSink * sink)
-{
- gst_switch_sink_set_child (GST_SWITCH_SINK (sink), NULL);
-
- g_free (sink->gconf_str);
- sink->gconf_str = NULL;
-}
-
-static void
-gst_gconf_audio_sink_init (GstGConfAudioSink * sink,
- GstGConfAudioSinkClass * g_class)
-{
- gst_gconf_audio_sink_reset (sink);
-
- sink->client = gconf_client_get_default ();
- gconf_client_add_dir (sink->client, GST_GCONF_DIR "/default",
- GCONF_CLIENT_PRELOAD_RECURSIVE, NULL);
-
- gst_gconf_switch_profile (sink, GCONF_PROFILE_SOUNDS);
-}
-
-static void
-gst_gconf_audio_sink_dispose (GObject * object)
-{
- GstGConfAudioSink *sink = GST_GCONF_AUDIO_SINK (object);
-
- if (sink->client) {
- gst_gconf_switch_profile (sink, GCONF_PROFILE_NONE);
- g_object_unref (G_OBJECT (sink->client));
- sink->client = NULL;
- }
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static void
-gst_gconf_audio_sink_finalize (GstGConfAudioSink * sink)
-{
- g_free (sink->gconf_str);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, finalize, ((GObject *) (sink)));
-}
-
-static gboolean
-do_change_child (GstGConfAudioSink * sink)
-{
- const gchar *key;
- gchar *new_gconf_str;
- GstElement *new_kid;
-
- if (sink->profile == GCONF_PROFILE_NONE)
- return FALSE; /* Can't switch to a 'NONE' sink */
-
- key = gst_gconf_get_key_for_sink_profile (sink->profile);
- new_gconf_str = gst_gconf_get_string (key);
-
- GST_LOG_OBJECT (sink, "old gconf string: %s", GST_STR_NULL (sink->gconf_str));
- GST_LOG_OBJECT (sink, "new gconf string: %s", GST_STR_NULL (new_gconf_str));
-
- if (new_gconf_str != NULL && sink->gconf_str != NULL &&
- (strlen (new_gconf_str) == 0 ||
- strcmp (sink->gconf_str, new_gconf_str) == 0)) {
- g_free (new_gconf_str);
- GST_DEBUG_OBJECT (sink,
- "GConf key was updated, but it didn't change. Ignoring");
- return TRUE;
- }
-
- GST_DEBUG_OBJECT (sink, "GConf key changed: '%s' to '%s'",
- GST_STR_NULL (sink->gconf_str), GST_STR_NULL (new_gconf_str));
-
- GST_DEBUG_OBJECT (sink, "Creating new child for profile %d", sink->profile);
- new_kid =
- gst_gconf_render_bin_with_default (new_gconf_str, DEFAULT_AUDIOSINK);
-
- if (new_kid == NULL) {
- GST_ELEMENT_ERROR (sink, LIBRARY, SETTINGS, (NULL),
- ("Failed to render audio sink from GConf"));
- goto fail;
- }
-
- if (!gst_switch_sink_set_child (GST_SWITCH_SINK (sink), new_kid)) {
- GST_WARNING_OBJECT (sink, "Failed to update child element");
- goto fail;
- }
-
- g_free (sink->gconf_str);
- sink->gconf_str = new_gconf_str;
-
- GST_DEBUG_OBJECT (sink, "done changing gconf audio sink");
-
- return TRUE;
-
-fail:
- g_free (new_gconf_str);
- return FALSE;
-}
-
-static void
-gst_gconf_switch_profile (GstGConfAudioSink * sink, GstGConfProfile profile)
-{
- if (sink->client == NULL)
- return;
-
- if (sink->notify_id) {
- GST_DEBUG_OBJECT (sink, "Unsubscribing old key %s for profile %d",
- gst_gconf_get_key_for_sink_profile (sink->profile), sink->profile);
- gconf_client_notify_remove (sink->client, sink->notify_id);
- sink->notify_id = 0;
- }
-
- sink->profile = profile;
- if (profile != GCONF_PROFILE_NONE) {
- const gchar *key = gst_gconf_get_key_for_sink_profile (sink->profile);
-
- GST_DEBUG_OBJECT (sink, "Subscribing to key %s for profile %d",
- key, profile);
- sink->notify_id = gconf_client_notify_add (sink->client, key,
- cb_change_child, sink, NULL, NULL);
- }
-}
-
-static void
-gst_gconf_audio_sink_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstGConfAudioSink *sink;
-
- sink = GST_GCONF_AUDIO_SINK (object);
-
- switch (prop_id) {
- case PROP_PROFILE:
- gst_gconf_switch_profile (sink, g_value_get_enum (value));
- break;
- default:
- break;
- }
-}
-
-static void
-gst_gconf_audio_sink_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec)
-{
- GstGConfAudioSink *sink;
-
- sink = GST_GCONF_AUDIO_SINK (object);
-
- switch (prop_id) {
- case PROP_PROFILE:
- g_value_set_enum (value, sink->profile);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static void
-cb_change_child (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data)
-{
- do_change_child (GST_GCONF_AUDIO_SINK (data));
-}
-
-static GstStateChangeReturn
-gst_gconf_audio_sink_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstGConfAudioSink *sink = GST_GCONF_AUDIO_SINK (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!do_change_child (sink)) {
- gst_gconf_audio_sink_reset (sink);
- return GST_STATE_CHANGE_FAILURE;
- }
- break;
- default:
- break;
- }
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- gst_gconf_audio_sink_reset (sink);
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_GCONF_AUDIO_SINK_H__
-#define __GST_GCONF_AUDIO_SINK_H__
-
-#include <gst/gst.h>
-#include <gconf/gconf-client.h>
-#include "gstswitchsink.h"
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_GCONF_AUDIO_SINK \
- (gst_gconf_audio_sink_get_type ())
-#define GST_GCONF_AUDIO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_GCONF_AUDIO_SINK, \
- GstGConfAudioSink))
-#define GST_GCONF_AUDIO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_GCONF_AUDIO_SINK, \
- GstGConfAudioSinkClass))
-#define GST_IS_GCONF_AUDIO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_GCONF_AUDIO_SINK))
-#define GST_IS_GCONF_AUDIO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_GCONF_AUDIO_SINK))
-
-typedef struct _GstGConfAudioSink {
- GstSwitchSink parent;
-
- /* explicit pointers to stuff used */
- GConfClient *client;
- GstGConfProfile profile;
- guint notify_id;
-
- /* Current gconf string */
- gchar *gconf_str;
-} GstGConfAudioSink;
-
-typedef struct _GstGConfAudioSinkClass {
- GstSwitchSinkClass parent_class;
-} GstGConfAudioSinkClass;
-
-GType gst_gconf_audio_sink_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_GCONF_AUDIO_SINK_H__ */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2005 Tim-Philipp Müller <tim centricular net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/**
- * SECTION:element-gconfaudiosrc
- * @see_also: #GstAlsaSrc, #GstAutoAudioSrc
- *
- * This element records sound from the audiosink that has been configured in
- * GConf by the user.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch gconfaudiosrc ! audioconvert ! wavenc ! filesink location=record.wav
- * ]| Record from configured audioinput
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstgconfelements.h"
-#include "gstgconfaudiosrc.h"
-
-static void gst_gconf_audio_src_dispose (GObject * object);
-static void gst_gconf_audio_src_finalize (GstGConfAudioSrc * src);
-static void cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data);
-static GstStateChangeReturn
-gst_gconf_audio_src_change_state (GstElement * element,
- GstStateChange transition);
-
-GST_BOILERPLATE (GstGConfAudioSrc, gst_gconf_audio_src, GstSwitchSrc,
- GST_TYPE_SWITCH_SRC);
-
-static void
-gst_gconf_audio_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (eklass, "GConf audio source",
- "Source/Audio",
- "Audio source embedding the GConf-settings for audio input",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-}
-
-static void
-gst_gconf_audio_src_class_init (GstGConfAudioSrcClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- oklass->dispose = gst_gconf_audio_src_dispose;
- oklass->finalize = (GObjectFinalizeFunc) gst_gconf_audio_src_finalize;
- eklass->change_state = gst_gconf_audio_src_change_state;
-}
-
-/*
- * Hack to make negotiation work.
- */
-
-static gboolean
-gst_gconf_audio_src_reset (GstGConfAudioSrc * src)
-{
- gst_switch_src_set_child (GST_SWITCH_SRC (src), NULL);
-
- g_free (src->gconf_str);
- src->gconf_str = NULL;
- return TRUE;
-}
-
-static void
-gst_gconf_audio_src_init (GstGConfAudioSrc * src,
- GstGConfAudioSrcClass * g_class)
-{
- gst_gconf_audio_src_reset (src);
-
- src->client = gconf_client_get_default ();
- gconf_client_add_dir (src->client, GST_GCONF_DIR,
- GCONF_CLIENT_PRELOAD_RECURSIVE, NULL);
- src->gconf_notify_id = gconf_client_notify_add (src->client,
- GST_GCONF_DIR "/" GST_GCONF_AUDIOSRC_KEY,
- cb_toggle_element, src, NULL, NULL);
-}
-
-static void
-gst_gconf_audio_src_dispose (GObject * object)
-{
- GstGConfAudioSrc *src = GST_GCONF_AUDIO_SRC (object);
-
- if (src->client) {
- if (src->gconf_notify_id) {
- gconf_client_notify_remove (src->client, src->gconf_notify_id);
- src->gconf_notify_id = 0;
- }
-
- g_object_unref (G_OBJECT (src->client));
- src->client = NULL;
- }
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static void
-gst_gconf_audio_src_finalize (GstGConfAudioSrc * src)
-{
- g_free (src->gconf_str);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, finalize, ((GObject *) (src)));
-}
-
-static gboolean
-do_toggle_element (GstGConfAudioSrc * src)
-{
- GstElement *new_kid;
- gchar *new_gconf_str;
-
- new_gconf_str = gst_gconf_get_string (GST_GCONF_AUDIOSRC_KEY);
- if (new_gconf_str != NULL && src->gconf_str != NULL &&
- (strlen (new_gconf_str) == 0 ||
- strcmp (src->gconf_str, new_gconf_str) == 0)) {
- g_free (new_gconf_str);
- GST_DEBUG_OBJECT (src, "GConf key was updated, but it didn't change");
- return TRUE;
- }
-
- GST_DEBUG_OBJECT (src, "GConf key changed: '%s' to '%s'",
- GST_STR_NULL (src->gconf_str), GST_STR_NULL (new_gconf_str));
-
- GST_DEBUG_OBJECT (src, "Creating new kid");
- if (!(new_kid = gst_gconf_get_default_audio_src ())) {
- GST_ELEMENT_ERROR (src, LIBRARY, SETTINGS, (NULL),
- ("Failed to render audio src from GConf"));
- return FALSE;
- }
-
- if (!gst_switch_src_set_child (GST_SWITCH_SRC (src), new_kid)) {
- GST_WARNING_OBJECT (src, "Failed to update child element");
- goto fail;
- }
-
- g_free (src->gconf_str);
- src->gconf_str = new_gconf_str;
-
- GST_DEBUG_OBJECT (src, "done changing gconf audio src");
-
- return TRUE;
-fail:
- g_free (new_gconf_str);
- return FALSE;
-}
-
-static void
-cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data)
-{
- do_toggle_element (GST_GCONF_AUDIO_SRC (data));
-}
-
-static GstStateChangeReturn
-gst_gconf_audio_src_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstGConfAudioSrc *src = GST_GCONF_AUDIO_SRC (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!do_toggle_element (src)) {
- gst_gconf_audio_src_reset (src);
- return GST_STATE_CHANGE_FAILURE;
- }
- break;
- default:
- break;
- }
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- if (!gst_gconf_audio_src_reset (src))
- ret = GST_STATE_CHANGE_FAILURE;
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2005 Tim-Philipp Müller <tim centricular net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_GCONF_AUDIO_SRC_H__
-#define __GST_GCONF_AUDIO_SRC_H__
-
-#include <gst/gst.h>
-#include <gconf/gconf-client.h>
-
-#include "gstswitchsrc.h"
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_GCONF_AUDIO_SRC (gst_gconf_audio_src_get_type ())
-#define GST_GCONF_AUDIO_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_GCONF_AUDIO_SRC, GstGConfAudioSrc))
-#define GST_GCONF_AUDIO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_GCONF_AUDIO_SRC, GstGConfAudioSrcClass))
-#define GST_IS_GCONF_AUDIO_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_GCONF_AUDIO_SRC))
-#define GST_IS_GCONF_AUDIO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_GCONF_AUDIO_SRC))
-
-typedef struct _GstGConfAudioSrc {
- GstSwitchSrc parent;
-
- /* explicit pointers to stuff used */
- GConfClient *client;
-
- guint gconf_notify_id;
-
- /* Current gconf string */
- gchar *gconf_str;
-} GstGConfAudioSrc;
-
-typedef struct _GstGConfAudioSrcClass {
- GstSwitchSrcClass parent_class;
-} GstGConfAudioSrcClass;
-
-GType gst_gconf_audio_src_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_GCONF_AUDIO_SRC_H__ */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <gst/gst.h>
-
-#include "gstgconfelements.h"
-
-#include "gstgconfaudiosink.h"
-#include "gstgconfaudiosrc.h"
-#include "gstgconfvideosink.h"
-#include "gstgconfvideosrc.h"
-
-GST_DEBUG_CATEGORY (gconf_debug);
-
-static gboolean
-plugin_init (GstPlugin * plugin)
-{
- GST_DEBUG_CATEGORY_INIT (gconf_debug, "gconf", 0,
- "GConf/GStreamer audio/video output wrapper elements");
-
- if (!gst_element_register (plugin, "gconfvideosink",
- GST_RANK_NONE, GST_TYPE_GCONF_VIDEO_SINK) ||
- !gst_element_register (plugin, "gconfvideosrc",
- GST_RANK_NONE, GST_TYPE_GCONF_VIDEO_SRC) ||
- !gst_element_register (plugin, "gconfaudiosink",
- GST_RANK_NONE, GST_TYPE_GCONF_AUDIO_SINK) ||
- !gst_element_register (plugin, "gconfaudiosrc",
- GST_RANK_NONE, GST_TYPE_GCONF_AUDIO_SRC)) {
- return FALSE;
- }
-
- return TRUE;
-}
-
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
- GST_VERSION_MINOR,
- "gconfelements",
- "elements wrapping the GStreamer/GConf audio/video output settings",
- plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_GCONF_ELEMENTS_H__
-#define __GST_GCONF_ELEMENTS_H__
-
-#include "gstgconf.h"
-
-GST_DEBUG_CATEGORY_EXTERN (gconf_debug);
-#define GST_CAT_DEFAULT gconf_debug
-
-#endif /* __GST_GCONF_ELEMENTS_H__ */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/**
- * SECTION:element-gconfvideosink
- *
- * This element outputs video to the videosink that has been configured in
- * GConf by the user.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch filesrc location=foo.ogg ! decodebin ! ffmpegcolorspace ! gconfvideosink
- * ]| Play on configured videosink
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstgconfelements.h"
-#include "gstgconfvideosink.h"
-
-static void gst_gconf_video_sink_dispose (GObject * object);
-static void gst_gconf_video_sink_finalize (GstGConfVideoSink * sink);
-static void cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data);
-static GstStateChangeReturn
-gst_gconf_video_sink_change_state (GstElement * element,
- GstStateChange transition);
-
-GST_BOILERPLATE (GstGConfVideoSink, gst_gconf_video_sink, GstSwitchSink,
- GST_TYPE_SWITCH_SINK);
-
-static void
-gst_gconf_video_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (eklass, "GConf video sink",
- "Sink/Video",
- "Video sink embedding the GConf-settings for video output",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-}
-
-static void
-gst_gconf_video_sink_class_init (GstGConfVideoSinkClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- oklass->dispose = gst_gconf_video_sink_dispose;
- oklass->finalize = (GObjectFinalizeFunc) gst_gconf_video_sink_finalize;
- eklass->change_state = gst_gconf_video_sink_change_state;
-}
-
-/*
- * Hack to make negotiation work.
- */
-
-static void
-gst_gconf_video_sink_reset (GstGConfVideoSink * sink)
-{
- gst_switch_sink_set_child (GST_SWITCH_SINK (sink), NULL);
-
- g_free (sink->gconf_str);
- sink->gconf_str = NULL;
-}
-
-static void
-gst_gconf_video_sink_init (GstGConfVideoSink * sink,
- GstGConfVideoSinkClass * g_class)
-{
- gst_gconf_video_sink_reset (sink);
-
- sink->client = gconf_client_get_default ();
- gconf_client_add_dir (sink->client, GST_GCONF_DIR,
- GCONF_CLIENT_PRELOAD_RECURSIVE, NULL);
- sink->notify_id = gconf_client_notify_add (sink->client,
- GST_GCONF_DIR "/" GST_GCONF_VIDEOSINK_KEY,
- cb_toggle_element, sink, NULL, NULL);
-}
-
-static void
-gst_gconf_video_sink_dispose (GObject * object)
-{
- GstGConfVideoSink *sink = GST_GCONF_VIDEO_SINK (object);
-
- if (sink->client) {
- if (sink->notify_id != 0)
- gconf_client_notify_remove (sink->client, sink->notify_id);
-
- g_object_unref (G_OBJECT (sink->client));
- sink->client = NULL;
- }
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static void
-gst_gconf_video_sink_finalize (GstGConfVideoSink * sink)
-{
- g_free (sink->gconf_str);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, finalize, ((GObject *) (sink)));
-}
-
-static gboolean
-do_change_child (GstGConfVideoSink * sink)
-{
- gchar *new_gconf_str;
- GstElement *new_kid;
-
- new_gconf_str = gst_gconf_get_string (GST_GCONF_VIDEOSINK_KEY);
-
- GST_LOG_OBJECT (sink, "old gconf string: %s", GST_STR_NULL (sink->gconf_str));
- GST_LOG_OBJECT (sink, "new gconf string: %s", GST_STR_NULL (new_gconf_str));
-
- if (new_gconf_str != NULL && sink->gconf_str != NULL &&
- (strlen (new_gconf_str) == 0 ||
- strcmp (sink->gconf_str, new_gconf_str) == 0)) {
- g_free (new_gconf_str);
- GST_DEBUG_OBJECT (sink,
- "GConf key was updated, but it didn't change. Ignoring");
- return TRUE;
- }
-
- GST_DEBUG_OBJECT (sink, "GConf key changed: '%s' to '%s'",
- GST_STR_NULL (sink->gconf_str), GST_STR_NULL (new_gconf_str));
-
- GST_DEBUG_OBJECT (sink, "Creating new kid");
- if (!(new_kid = gst_gconf_get_default_video_sink ())) {
- GST_ELEMENT_ERROR (sink, LIBRARY, SETTINGS, (NULL),
- ("Failed to render video sink from GConf"));
- return FALSE;
- }
-
- if (!gst_switch_sink_set_child (GST_SWITCH_SINK (sink), new_kid)) {
- GST_WARNING_OBJECT (sink, "Failed to update child element");
- goto fail;
- }
-
- g_free (sink->gconf_str);
- sink->gconf_str = new_gconf_str;
-
- GST_DEBUG_OBJECT (sink, "done changing gconf video sink");
-
- return TRUE;
-
-fail:
- g_free (new_gconf_str);
- return FALSE;
-}
-
-static void
-cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data)
-{
- do_change_child (GST_GCONF_VIDEO_SINK (data));
-}
-
-static GstStateChangeReturn
-gst_gconf_video_sink_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstGConfVideoSink *sink = GST_GCONF_VIDEO_SINK (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!do_change_child (sink)) {
- gst_gconf_video_sink_reset (sink);
- return GST_STATE_CHANGE_FAILURE;
- }
- break;
- default:
- break;
- }
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- gst_gconf_video_sink_reset (sink);
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_GCONF_VIDEO_SINK_H__
-#define __GST_GCONF_VIDEO_SINK_H__
-
-#include <gst/gst.h>
-#include <gconf/gconf-client.h>
-
-#include "gstswitchsink.h"
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_GCONF_VIDEO_SINK \
- (gst_gconf_video_sink_get_type ())
-#define GST_GCONF_VIDEO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_GCONF_VIDEO_SINK, \
- GstGConfVideoSink))
-#define GST_GCONF_VIDEO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_GCONF_VIDEO_SINK, \
- GstGConfVideoSinkClass))
-#define GST_IS_GCONF_VIDEO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_GCONF_VIDEO_SINK))
-#define GST_IS_GCONF_VIDEO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_GCONF_VIDEO_SINK))
-
-typedef struct _GstGConfVideoSink {
- GstSwitchSink parent;
-
- /* explicit pointers to stuff used */
- GConfClient *client;
-
- /* gconf notify id */
- guint notify_id;
-
- /* Current gconf string */
- gchar *gconf_str;
-} GstGConfVideoSink;
-
-typedef struct _GstGConfVideoSinkClass {
- GstSwitchSinkClass parent_class;
-} GstGConfVideoSinkClass;
-
-GType gst_gconf_video_sink_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_GCONF_VIDEO_SINK_H__ */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2005 Tim-Philipp Müller <tim centricular net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/**
- * SECTION:element-gconfvideosrc
- * @see_also: #GstAlsaSrc, #GstAutoVideoSrc
- *
- * This element records video from the videosink that has been configured in
- * GConf by the user.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch gconfvideosrc ! theoraenc ! oggmux ! filesink location=record.ogg
- * ]| Record from configured videoinput
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstgconfelements.h"
-#include "gstgconfvideosrc.h"
-
-static void gst_gconf_video_src_dispose (GObject * object);
-static void gst_gconf_video_src_finalize (GstGConfVideoSrc * src);
-static void cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data);
-static GstStateChangeReturn
-gst_gconf_video_src_change_state (GstElement * element,
- GstStateChange transition);
-
-GST_BOILERPLATE (GstGConfVideoSrc, gst_gconf_video_src, GstSwitchSrc,
- GST_TYPE_SWITCH_SRC);
-
-static void
-gst_gconf_video_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (eklass, "GConf video source",
- "Source/Video",
- "Video source embedding the GConf-settings for video input",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-}
-
-static void
-gst_gconf_video_src_class_init (GstGConfVideoSrcClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- oklass->dispose = gst_gconf_video_src_dispose;
- oklass->finalize = (GObjectFinalizeFunc) gst_gconf_video_src_finalize;
- eklass->change_state = gst_gconf_video_src_change_state;
-}
-
-/*
- * Hack to make negotiation work.
- */
-
-static gboolean
-gst_gconf_video_src_reset (GstGConfVideoSrc * src)
-{
- gst_switch_src_set_child (GST_SWITCH_SRC (src), NULL);
-
- g_free (src->gconf_str);
- src->gconf_str = NULL;
-
- return TRUE;
-}
-
-static void
-gst_gconf_video_src_init (GstGConfVideoSrc * src,
- GstGConfVideoSrcClass * g_class)
-{
- gst_gconf_video_src_reset (src);
-
- src->client = gconf_client_get_default ();
- gconf_client_add_dir (src->client, GST_GCONF_DIR,
- GCONF_CLIENT_PRELOAD_RECURSIVE, NULL);
- src->notify_id = gconf_client_notify_add (src->client,
- GST_GCONF_DIR "/" GST_GCONF_VIDEOSRC_KEY,
- cb_toggle_element, src, NULL, NULL);
-}
-
-static void
-gst_gconf_video_src_dispose (GObject * object)
-{
- GstGConfVideoSrc *src = GST_GCONF_VIDEO_SRC (object);
-
- if (src->client) {
- if (src->notify_id != 0)
- gconf_client_notify_remove (src->client, src->notify_id);
-
- g_object_unref (G_OBJECT (src->client));
- src->client = NULL;
- }
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static void
-gst_gconf_video_src_finalize (GstGConfVideoSrc * src)
-{
- g_free (src->gconf_str);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, finalize, ((GObject *) (src)));
-}
-
-static gboolean
-do_toggle_element (GstGConfVideoSrc * src)
-{
- GstElement *new_kid;
- gchar *new_gconf_str;
-
- new_gconf_str = gst_gconf_get_string (GST_GCONF_VIDEOSRC_KEY);
- if (new_gconf_str != NULL && src->gconf_str != NULL &&
- (strlen (new_gconf_str) == 0 ||
- strcmp (src->gconf_str, new_gconf_str) == 0)) {
- g_free (new_gconf_str);
- GST_DEBUG_OBJECT (src, "GConf key was updated, but it didn't change");
- return TRUE;
- }
-
- GST_DEBUG_OBJECT (src, "GConf key changed: '%s' to '%s'",
- GST_STR_NULL (src->gconf_str), GST_STR_NULL (new_gconf_str));
-
- GST_DEBUG_OBJECT (src, "Creating new kid");
- if (!(new_kid = gst_gconf_get_default_video_src ())) {
- GST_ELEMENT_ERROR (src, LIBRARY, SETTINGS, (NULL),
- ("Failed to render video src from GConf"));
- return FALSE;
- }
-
- if (!gst_switch_src_set_child (GST_SWITCH_SRC (src), new_kid)) {
- GST_WARNING_OBJECT (src, "Failed to update child element");
- goto fail;
- }
-
- g_free (src->gconf_str);
- src->gconf_str = new_gconf_str;
-
- GST_DEBUG_OBJECT (src, "done changing gconf video src");
-
- return TRUE;
-fail:
- g_free (new_gconf_str);
- return FALSE;
-}
-
-static void
-cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data)
-{
- do_toggle_element (GST_GCONF_VIDEO_SRC (data));
-}
-
-static GstStateChangeReturn
-gst_gconf_video_src_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstGConfVideoSrc *src = GST_GCONF_VIDEO_SRC (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!do_toggle_element (src)) {
- gst_gconf_video_src_reset (src);
- return GST_STATE_CHANGE_FAILURE;
- }
- break;
- default:
- break;
- }
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- if (!gst_gconf_video_src_reset (src))
- ret = GST_STATE_CHANGE_FAILURE;
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2005 Tim-Philipp Müller <tim centricular net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_GCONF_VIDEO_SRC_H__
-#define __GST_GCONF_VIDEO_SRC_H__
-
-#include <gst/gst.h>
-#include <gconf/gconf-client.h>
-
-#include "gstswitchsrc.h"
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_GCONF_VIDEO_SRC (gst_gconf_video_src_get_type ())
-#define GST_GCONF_VIDEO_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_GCONF_VIDEO_SRC, GstGConfVideoSrc))
-#define GST_GCONF_VIDEO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_GCONF_VIDEO_SRC, GstGConfVideoSrcClass))
-#define GST_IS_GCONF_VIDEO_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_GCONF_VIDEO_SRC))
-#define GST_IS_GCONF_VIDEO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_GCONF_VIDEO_SRC))
-
-typedef struct _GstGConfVideoSrc {
- GstSwitchSrc parent;
-
- /* explicit pointers to stuff used */
- GConfClient *client;
-
- /* gconf key notification id */
- guint notify_id;
-
- /* Current gconf string */
- gchar *gconf_str;
-} GstGConfVideoSrc;
-
-typedef struct _GstGConfVideoSrcClass {
- GstSwitchSrcClass parent_class;
-} GstGConfVideoSrcClass;
-
-GType gst_gconf_video_src_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_GCONF_VIDEO_SRC_H__ */
+++ /dev/null
-/* GStreamer
- * Copyright (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * Copyright (c) 2006 Jürg Billeter <j@bitron.ch>
- * Copyright (c) 2007 Jan Schmidt <thaytan@noraisin.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstswitchsink.h"
-
-GST_DEBUG_CATEGORY_STATIC (switch_debug);
-#define GST_CAT_DEFAULT switch_debug
-
-static void gst_switch_sink_dispose (GObject * object);
-static GstStateChangeReturn
-gst_switch_sink_change_state (GstElement * element, GstStateChange transition);
-
-enum
-{
- PROP_0
-};
-
-GST_BOILERPLATE (GstSwitchSink, gst_switch_sink, GstBin, GST_TYPE_BIN);
-
-static void
-gst_switch_sink_base_init (gpointer klass)
-{
- GST_DEBUG_CATEGORY_INIT (switch_debug, "switchsink", 0, "switchsink element");
-}
-
-static void
-gst_switch_sink_class_init (GstSwitchSinkClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
- static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS_ANY);
- GstPadTemplate *child_pad_templ;
-
- oklass->dispose = gst_switch_sink_dispose;
- eklass->change_state = gst_switch_sink_change_state;
-
- /* Provide a default pad template if the child didn't */
- child_pad_templ = gst_element_class_get_pad_template (eklass, "sink");
- if (child_pad_templ == NULL) {
- gst_element_class_add_static_pad_template (eklass, &sink_template);
- }
-}
-
-static gboolean
-gst_switch_sink_reset (GstSwitchSink * sink)
-{
- /* this will install fakesink if no other child has been set,
- * otherwise we rely on the subclass to know when to unset its
- * custom kid */
- if (sink->kid == NULL) {
- return gst_switch_sink_set_child (sink, NULL);
- }
-
- return TRUE;
-}
-
-static void
-gst_switch_sink_init (GstSwitchSink * sink, GstSwitchSinkClass * g_class)
-{
- GstElementClass *eklass = GST_ELEMENT_GET_CLASS (sink);
- GstPadTemplate *templ;
-
- templ = gst_element_class_get_pad_template (eklass, "sink");
- sink->pad = gst_ghost_pad_new_no_target_from_template ("sink", templ);
- gst_element_add_pad (GST_ELEMENT (sink), sink->pad);
-
- gst_switch_sink_reset (sink);
-
- GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_IS_SINK);
-}
-
-static void
-gst_switch_sink_dispose (GObject * object)
-{
- GstSwitchSink *sink = GST_SWITCH_SINK (object);
- GstObject *new_kid, *kid;
-
- GST_OBJECT_LOCK (sink);
- new_kid = GST_OBJECT_CAST (sink->new_kid);
- sink->new_kid = NULL;
-
- kid = GST_OBJECT_CAST (sink->kid);
- sink->kid = NULL;
- GST_OBJECT_UNLOCK (sink);
-
- gst_object_replace (&new_kid, NULL);
- gst_object_replace (&kid, NULL);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static gboolean
-gst_switch_sink_commit_new_kid (GstSwitchSink * sink)
-{
- GstPad *targetpad;
- GstState kid_state;
- GstElement *new_kid, *old_kid;
- gboolean is_fakesink = FALSE;
- GstBus *bus;
-
- /* need locking around member accesses */
- GST_OBJECT_LOCK (sink);
- /* If we're currently changing state, set the child to the next state
- * we're transitioning too, rather than our current state which is
- * about to change */
- if (GST_STATE_NEXT (sink) != GST_STATE_VOID_PENDING)
- kid_state = GST_STATE_NEXT (sink);
- else
- kid_state = GST_STATE (sink);
-
- new_kid = sink->new_kid ? gst_object_ref (sink->new_kid) : NULL;
- sink->new_kid = NULL;
- GST_OBJECT_UNLOCK (sink);
-
- /* Fakesink by default if NULL is passed as the new child */
- if (new_kid == NULL) {
- GST_DEBUG_OBJECT (sink, "Replacing kid with fakesink");
- new_kid = gst_element_factory_make ("fakesink", "testsink");
- if (new_kid == NULL) {
- GST_ERROR_OBJECT (sink, "Failed to create fakesink");
- return FALSE;
- }
- /* Add a reference, as it would if the element came from sink->new_kid */
- gst_object_ref (new_kid);
- g_object_set (new_kid, "sync", TRUE, NULL);
- is_fakesink = TRUE;
- } else {
- GST_DEBUG_OBJECT (sink, "Setting new kid");
- }
-
- /* set temporary bus of our own to catch error messages from the child
- * (could we just set our own bus on it, or would the state change messages
- * from the not-yet-added element confuse the state change algorithm? Let's
- * play it safe for now) */
- bus = gst_bus_new ();
- gst_element_set_bus (new_kid, bus);
- gst_object_unref (bus);
-
- if (gst_element_set_state (new_kid, kid_state) == GST_STATE_CHANGE_FAILURE) {
- GstMessage *msg;
-
- /* check if child posted an error message and if so re-post it on our bus
- * so that the application gets to see a decent error and not our generic
- * fallback error message which is completely indecipherable to the user */
- msg = gst_bus_pop_filtered (GST_ELEMENT_BUS (new_kid), GST_MESSAGE_ERROR);
- if (msg) {
- GST_INFO_OBJECT (sink, "Forwarding kid error: %" GST_PTR_FORMAT, msg);
- gst_element_post_message (GST_ELEMENT (sink), msg);
- }
- GST_ELEMENT_ERROR (sink, CORE, STATE_CHANGE, (NULL),
- ("Failed to set state on new child."));
- gst_element_set_bus (new_kid, NULL);
- gst_object_unref (new_kid);
- return FALSE;
- }
- gst_element_set_bus (new_kid, NULL);
- gst_bin_add (GST_BIN (sink), new_kid);
-
- /* Now, replace the existing child */
- GST_OBJECT_LOCK (sink);
- old_kid = sink->kid;
- sink->kid = new_kid;
- /* Mark whether a custom kid or fakesink has been installed */
- sink->have_kid = !is_fakesink;
- GST_OBJECT_UNLOCK (sink);
-
- /* kill old element */
- if (old_kid) {
- GST_DEBUG_OBJECT (sink, "Removing old kid %" GST_PTR_FORMAT, old_kid);
- gst_element_set_state (old_kid, GST_STATE_NULL);
- gst_bin_remove (GST_BIN (sink), old_kid);
- gst_object_unref (old_kid);
- /* Don't lose the SINK flag */
- GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_IS_SINK);
- }
-
- /* re-attach ghostpad */
- GST_DEBUG_OBJECT (sink, "Creating new ghostpad");
- targetpad = gst_element_get_static_pad (sink->kid, "sink");
- gst_ghost_pad_set_target (GST_GHOST_PAD (sink->pad), targetpad);
- gst_object_unref (targetpad);
- GST_DEBUG_OBJECT (sink, "done changing child of switchsink");
-
- /* FIXME: Push new-segment info and pre-roll buffer(s) into the kid */
-
- return TRUE;
-}
-
-gboolean
-gst_switch_sink_set_child (GstSwitchSink * sink, GstElement * new_kid)
-{
- GstState cur, next;
- GstElement **p_kid;
-
- /* Nothing to do if clearing the child and we've already installed fakesink */
- if (new_kid == NULL && sink->kid != NULL && sink->have_kid == FALSE)
- return TRUE;
-
- /* Store the new kid to be committed later */
- GST_OBJECT_LOCK (sink);
- cur = GST_STATE (sink);
- next = GST_STATE_NEXT (sink);
- p_kid = &sink->new_kid;
- gst_object_replace ((GstObject **) p_kid, (GstObject *) new_kid);
- GST_OBJECT_UNLOCK (sink);
- if (new_kid)
- gst_object_unref (new_kid);
-
- /* Sometime, it would be lovely to allow sink changes even when
- * already running, but this involves sending an appropriate new-segment
- * and possibly prerolling etc */
- /* FIXME: Block the pad and replace the kid when it completes */
- if (cur > GST_STATE_READY || next == GST_STATE_PAUSED) {
- GST_DEBUG_OBJECT (sink,
- "Switch-sink is already running. Ignoring change of child.");
- gst_object_unref (new_kid);
- return TRUE;
- }
-
- return gst_switch_sink_commit_new_kid (sink);
-}
-
-static GstStateChangeReturn
-gst_switch_sink_change_state (GstElement * element, GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstSwitchSink *sink = GST_SWITCH_SINK (element);
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- if (!gst_switch_sink_reset (sink))
- ret = GST_STATE_CHANGE_FAILURE;
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * Copyright (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * Copyright (c) 2007 Jan Schmidt <thaytan@mad.scientist.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_SWITCH_SINK_H__
-#define __GST_SWITCH_SINK_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_SWITCH_SINK \
- (gst_switch_sink_get_type ())
-#define GST_SWITCH_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_SWITCH_SINK, \
- GstSwitchSink))
-#define GST_SWITCH_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_SWITCH_SINK, \
- GstSwitchSinkClass))
-#define GST_IS_SWITCH_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_SWITCH_SINK))
-#define GST_IS_SWITCH_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_SWITCH_SINK))
-
-typedef struct _GstSwitchSink {
- GstBin parent;
-
- GstElement *kid;
- GstElement *new_kid;
- GstPad *pad;
-
- /* If a custom child has been set... */
- gboolean have_kid;
-} GstSwitchSink;
-
-typedef struct _GstSwitchSinkClass {
- GstBinClass parent_class;
-} GstSwitchSinkClass;
-
-GType gst_switch_sink_get_type (void);
-
-gboolean gst_switch_sink_set_child (GstSwitchSink *ssink, GstElement *new_kid);
-
-G_END_DECLS
-
-#endif /* __GST_SWITCH_SINK_H__ */
+++ /dev/null
-/* GStreamer
- * Copyright (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * Copyright (c) 2006 Jürg Billeter <j@bitron.ch>
- * Copyright (c) 2007 Jan Schmidt <thaytan@noraisin.net>
- * Copyright (c) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstswitchsrc.h"
-
-GST_DEBUG_CATEGORY_STATIC (switch_debug);
-#define GST_CAT_DEFAULT switch_debug
-
-static void gst_switch_src_dispose (GObject * object);
-static GstStateChangeReturn
-gst_switch_src_change_state (GstElement * element, GstStateChange transition);
-
-GST_BOILERPLATE (GstSwitchSrc, gst_switch_src, GstBin, GST_TYPE_BIN);
-
-static void
-gst_switch_src_base_init (gpointer klass)
-{
- GST_DEBUG_CATEGORY_INIT (switch_debug, "switchsrc", 0, "switchsrc element");
-}
-
-static void
-gst_switch_src_class_init (GstSwitchSrcClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
- static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS_ANY);
- GstPadTemplate *child_pad_templ;
-
- oklass->dispose = gst_switch_src_dispose;
- eklass->change_state = gst_switch_src_change_state;
-
- /* Provide a default pad template if the child didn't */
- child_pad_templ = gst_element_class_get_pad_template (eklass, "src");
- if (child_pad_templ == NULL) {
- gst_element_class_add_static_pad_template (eklass, &src_template);
- }
-}
-
-static gboolean
-gst_switch_src_reset (GstSwitchSrc * src)
-{
- /* this will install fakesrc if no other child has been set,
- * otherwise we rely on the subclass to know when to unset its
- * custom kid */
- if (src->kid == NULL) {
- return gst_switch_src_set_child (src, NULL);
- }
-
- return TRUE;
-}
-
-static void
-gst_switch_src_init (GstSwitchSrc * src, GstSwitchSrcClass * g_class)
-{
- GstElementClass *eklass = GST_ELEMENT_GET_CLASS (src);
- GstPadTemplate *templ;
-
- templ = gst_element_class_get_pad_template (eklass, "src");
- src->pad = gst_ghost_pad_new_no_target_from_template ("src", templ);
- gst_element_add_pad (GST_ELEMENT (src), src->pad);
-
- gst_switch_src_reset (src);
-
- GST_OBJECT_FLAG_SET (src, GST_ELEMENT_IS_SOURCE);
-}
-
-static void
-gst_switch_src_dispose (GObject * object)
-{
- GstSwitchSrc *src = GST_SWITCH_SRC (object);
- GstObject *new_kid, *kid;
-
- GST_OBJECT_LOCK (src);
- new_kid = GST_OBJECT_CAST (src->new_kid);
- src->new_kid = NULL;
-
- kid = GST_OBJECT_CAST (src->kid);
- src->kid = NULL;
- GST_OBJECT_UNLOCK (src);
-
- gst_object_replace (&new_kid, NULL);
- gst_object_replace (&kid, NULL);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static gboolean
-gst_switch_src_commit_new_kid (GstSwitchSrc * src)
-{
- GstPad *targetpad;
- GstState kid_state;
- GstElement *new_kid, *old_kid;
- gboolean is_fakesrc = FALSE;
- GstBus *bus;
-
- /* need locking around member accesses */
- GST_OBJECT_LOCK (src);
- /* If we're currently changing state, set the child to the next state
- * we're transitioning too, rather than our current state which is
- * about to change */
- if (GST_STATE_NEXT (src) != GST_STATE_VOID_PENDING)
- kid_state = GST_STATE_NEXT (src);
- else
- kid_state = GST_STATE (src);
-
- new_kid = src->new_kid ? gst_object_ref (src->new_kid) : NULL;
- src->new_kid = NULL;
- GST_OBJECT_UNLOCK (src);
-
- /* Fakesrc by default if NULL is passed as the new child */
- if (new_kid == NULL) {
- GST_DEBUG_OBJECT (src, "Replacing kid with fakesrc");
- new_kid = gst_element_factory_make ("fakesrc", "testsrc");
- if (new_kid == NULL) {
- GST_ERROR_OBJECT (src, "Failed to create fakesrc");
- return FALSE;
- }
- /* Add a reference, as it would if the element came from src->new_kid */
- gst_object_ref (new_kid);
- is_fakesrc = TRUE;
- } else {
- GST_DEBUG_OBJECT (src, "Setting new kid");
- }
-
- /* set temporary bus of our own to catch error messages from the child
- * (could we just set our own bus on it, or would the state change messages
- * from the not-yet-added element confuse the state change algorithm? Let's
- * play it safe for now) */
- bus = gst_bus_new ();
- gst_element_set_bus (new_kid, bus);
- gst_object_unref (bus);
-
- if (gst_element_set_state (new_kid, kid_state) == GST_STATE_CHANGE_FAILURE) {
- GstMessage *msg;
-
- /* check if child posted an error message and if so re-post it on our bus
- * so that the application gets to see a decent error and not our generic
- * fallback error message which is completely indecipherable to the user */
- msg = gst_bus_pop_filtered (GST_ELEMENT_BUS (new_kid), GST_MESSAGE_ERROR);
- if (msg) {
- GST_INFO_OBJECT (src, "Forwarding kid error: %" GST_PTR_FORMAT, msg);
- gst_element_post_message (GST_ELEMENT (src), msg);
- }
- GST_ELEMENT_ERROR (src, CORE, STATE_CHANGE, (NULL),
- ("Failed to set state on new child."));
- gst_element_set_bus (new_kid, NULL);
- gst_object_unref (new_kid);
- return FALSE;
- }
- gst_element_set_bus (new_kid, NULL);
- gst_bin_add (GST_BIN (src), new_kid);
-
- /* Now, replace the existing child */
- GST_OBJECT_LOCK (src);
- old_kid = src->kid;
- src->kid = new_kid;
- /* Mark whether a custom kid or fakesrc has been installed */
- src->have_kid = !is_fakesrc;
- GST_OBJECT_UNLOCK (src);
-
- /* kill old element */
- if (old_kid) {
- GST_DEBUG_OBJECT (src, "Removing old kid %" GST_PTR_FORMAT, old_kid);
- gst_element_set_state (old_kid, GST_STATE_NULL);
- gst_bin_remove (GST_BIN (src), old_kid);
- gst_object_unref (old_kid);
- /* Don't lose the SOURCE flag */
- GST_OBJECT_FLAG_SET (src, GST_ELEMENT_IS_SOURCE);
- }
-
- /* re-attach ghostpad */
- GST_DEBUG_OBJECT (src, "Creating new ghostpad");
- targetpad = gst_element_get_static_pad (src->kid, "src");
- gst_ghost_pad_set_target (GST_GHOST_PAD (src->pad), targetpad);
- gst_object_unref (targetpad);
- GST_DEBUG_OBJECT (src, "done changing child of switchsrc");
-
- return TRUE;
-}
-
-gboolean
-gst_switch_src_set_child (GstSwitchSrc * src, GstElement * new_kid)
-{
- GstState cur, next;
- GstElement **p_kid;
-
- /* Nothing to do if clearing the child and we've already installed fakesrc */
- if (new_kid == NULL && src->kid != NULL && src->have_kid == FALSE)
- return TRUE;
-
- /* Store the new kid to be committed later */
- GST_OBJECT_LOCK (src);
- cur = GST_STATE (src);
- next = GST_STATE_NEXT (src);
- p_kid = &src->new_kid;
- gst_object_replace ((GstObject **) p_kid, (GstObject *) new_kid);
- GST_OBJECT_UNLOCK (src);
- if (new_kid)
- gst_object_unref (new_kid);
-
- /* Sometime, it would be lovely to allow src changes even when
- * already running */
- /* FIXME: Block the pad and replace the kid when it completes */
- if (cur > GST_STATE_READY || next == GST_STATE_PAUSED) {
- GST_DEBUG_OBJECT (src,
- "Switch-src is already running. Ignoring change of child.");
- gst_object_unref (new_kid);
- return TRUE;
- }
-
- return gst_switch_src_commit_new_kid (src);
-}
-
-static GstStateChangeReturn
-gst_switch_src_change_state (GstElement * element, GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstSwitchSrc *src = GST_SWITCH_SRC (element);
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- if (!gst_switch_src_reset (src))
- ret = GST_STATE_CHANGE_FAILURE;
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- *
- * Copyright (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * Copyright (c) 2005 Tim-Philipp Müller <tim centricular net>
- * Copyright (c) 2007 Jan Schmidt <thaytan@mad.scientist.com>
- * Copyright (c) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_SWITCH_SRC_H__
-#define __GST_SWITCH_SRC_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_SWITCH_SRC (gst_switch_src_get_type ())
-#define GST_SWITCH_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_SWITCH_SRC, GstSwitchSrc))
-#define GST_SWITCH_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_SWITCH_SRC, GstSwitchSrcClass))
-#define GST_IS_SWITCH_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_SWITCH_SRC))
-#define GST_IS_SWITCH_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_SWITCH_SRC))
-
-typedef struct _GstSwitchSrc {
- GstBin parent;
-
- GstElement *kid;
- GstElement *new_kid;
- GstPad *pad;
-
- /* If a custom child has been set... */
- gboolean have_kid;
-} GstSwitchSrc;
-
-typedef struct _GstSwitchSrcClass {
- GstBinClass parent_class;
-} GstSwitchSrcClass;
-
-GType gst_switch_src_get_type (void);
-gboolean gst_switch_src_set_child (GstSwitchSrc *ssrc, GstElement *new_kid);
-
-G_END_DECLS
-
-#endif /* __GST_SWITCH_SRC_H__ */
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class,
- &gst_gdk_pixbuf_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_gdk_pixbuf_sink_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_gdk_pixbuf_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_gdk_pixbuf_sink_template));
gst_element_class_set_details_simple (element_class,
"GdkPixbuf image decoder", "Codec/Decoder/Image",
"Decodes images in a video stream using GdkPixbuf",
/* as long as we don't have flow returns for event functions we need
* to post an error here, or the application might never know that
* things failed */
- if (res != GST_FLOW_OK && res != GST_FLOW_WRONG_STATE) {
+ if (res != GST_FLOW_OK && res != GST_FLOW_FLUSHING) {
GST_ELEMENT_ERROR (pixbuf, STREAM, FAILED, (NULL),
("Flow: %s", gst_flow_get_name (res)));
}
"Sink/Video", "Output images as GdkPixbuf objects in bus messages",
"Tim-Philipp Müller <tim centricular net>");
- gst_element_class_add_static_pad_template (element_class,
- &pixbufsink_sink_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&pixbufsink_sink_factory));
}
static void
"Wim Taymans <wim.taymans@chello.be>, "
"Renato Filho <renato.filho@indt.org.br>");
- gst_element_class_add_static_pad_template (element_class,
- &gst_pixbufscale_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_pixbufscale_sink_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_pixbufscale_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_pixbufscale_sink_template));
}
static void
+++ /dev/null
-plugin_LTLIBRARIES = libgsthalelements.la
-
-libgsthalelements_la_SOURCES = \
- gsthalaudiosink.c \
- gsthalaudiosrc.c \
- gsthalelements.c \
- hal.c
-
-libgsthalelements_la_CFLAGS = $(GST_CFLAGS) $(HAL_CFLAGS)
-libgsthalelements_la_LIBADD = $(GST_LIBS) $(HAL_LIBS)
-libgsthalelements_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
-libgsthalelements_la_LIBTOOLFLAGS = --tag=disable-static
-
-noinst_HEADERS = \
- gsthalaudiosink.h \
- gsthalaudiosrc.h \
- gsthalelements.h \
- hal.h
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2006 Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-/**
- * SECTION:element-halaudiosink
- *
- * HalAudioSink allows access to output of sound devices by specifying the
- * corresponding persistent Unique Device Id (UDI) from the Hardware Abstraction
- * Layer (HAL) in the #GstHalAudioSink:udi property.
- * It currently always embeds alsasink or osssink as HAL doesn't support other
- * sound systems yet. You can also specify the UDI of a device that has ALSA or
- * OSS subdevices. If both are present ALSA is preferred.
- *
- * <refsect2>
- * <title>Examples</title>
- * |[
- * hal-find-by-property --key alsa.type --string playback
- * ]| list the UDIs of all your ALSA output devices
- * |[
- * gst-launch -v audiotestsrc ! halaudiosink udi=/org/freedesktop/Hal/devices/pci_8086_27d8_alsa_playback_0
- * ]| test your soundcard by playing a test signal on the specified sound device.
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include "gsthalelements.h"
-#include "gsthalaudiosink.h"
-
-static void gst_hal_audio_sink_dispose (GObject * object);
-static GstStateChangeReturn
-gst_hal_audio_sink_change_state (GstElement * element,
- GstStateChange transition);
-
-enum
-{
- PROP_0,
- PROP_UDI
-};
-
-GST_BOILERPLATE (GstHalAudioSink, gst_hal_audio_sink, GstBin, GST_TYPE_BIN);
-
-static void gst_hal_audio_sink_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_hal_audio_sink_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-static void
-gst_hal_audio_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS_ANY);
-
- gst_element_class_add_static_pad_template (eklass, &sink_template);
- gst_element_class_set_details_simple (eklass, "HAL audio sink",
- "Sink/Audio",
- "Audio sink for sound device access via HAL",
- "Jürg Billeter <j@bitron.ch>");
-}
-
-static void
-gst_hal_audio_sink_class_init (GstHalAudioSinkClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- oklass->set_property = gst_hal_audio_sink_set_property;
- oklass->get_property = gst_hal_audio_sink_get_property;
- oklass->dispose = gst_hal_audio_sink_dispose;
- eklass->change_state = gst_hal_audio_sink_change_state;
-
- g_object_class_install_property (oklass, PROP_UDI,
- g_param_spec_string ("udi",
- "UDI", "Unique Device Id", NULL,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-}
-
-/*
- * Hack to make negotiation work.
- */
-
-static void
-gst_hal_audio_sink_reset (GstHalAudioSink * sink)
-{
- GstPad *targetpad;
-
- /* fakesink */
- if (sink->kid) {
- gst_element_set_state (sink->kid, GST_STATE_NULL);
- gst_bin_remove (GST_BIN (sink), sink->kid);
- }
- sink->kid = gst_element_factory_make ("fakesink", "testsink");
- gst_bin_add (GST_BIN (sink), sink->kid);
-
- targetpad = gst_element_get_static_pad (sink->kid, "sink");
- gst_ghost_pad_set_target (GST_GHOST_PAD (sink->pad), targetpad);
- gst_object_unref (targetpad);
-}
-
-static void
-gst_hal_audio_sink_init (GstHalAudioSink * sink, GstHalAudioSinkClass * g_class)
-{
- sink->pad = gst_ghost_pad_new_no_target ("sink", GST_PAD_SINK);
- gst_element_add_pad (GST_ELEMENT (sink), sink->pad);
-
- gst_hal_audio_sink_reset (sink);
-}
-
-static void
-gst_hal_audio_sink_dispose (GObject * object)
-{
- GstHalAudioSink *sink = GST_HAL_AUDIO_SINK (object);
-
- if (sink->udi) {
- g_free (sink->udi);
- sink->udi = NULL;
- }
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static gboolean
-do_toggle_element (GstHalAudioSink * sink)
-{
- GstPad *targetpad;
-
- /* kill old element */
- if (sink->kid) {
- GST_DEBUG_OBJECT (sink, "Removing old kid");
- gst_element_set_state (sink->kid, GST_STATE_NULL);
- gst_bin_remove (GST_BIN (sink), sink->kid);
- sink->kid = NULL;
- }
-
- GST_DEBUG_OBJECT (sink, "Creating new kid");
- if (!sink->udi)
- GST_INFO_OBJECT (sink, "No UDI set for device, using default one");
-
- if (!(sink->kid = gst_hal_get_audio_sink (sink->udi))) {
- GST_ELEMENT_ERROR (sink, LIBRARY, SETTINGS, (NULL),
- ("Failed to render audio sink from Hal"));
- return FALSE;
- }
- gst_element_set_state (sink->kid, GST_STATE (sink));
- gst_bin_add (GST_BIN (sink), sink->kid);
-
- /* re-attach ghostpad */
- GST_DEBUG_OBJECT (sink, "Creating new ghostpad");
- targetpad = gst_element_get_static_pad (sink->kid, "sink");
- gst_ghost_pad_set_target (GST_GHOST_PAD (sink->pad), targetpad);
- gst_object_unref (targetpad);
- GST_DEBUG_OBJECT (sink, "done changing hal audio sink");
-
- return TRUE;
-}
-
-static void
-gst_hal_audio_sink_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstHalAudioSink *this = GST_HAL_AUDIO_SINK (object);
-
- GST_OBJECT_LOCK (this);
-
- switch (prop_id) {
- case PROP_UDI:
- if (this->udi)
- g_free (this->udi);
- this->udi = g_value_dup_string (value);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-
- GST_OBJECT_UNLOCK (this);
-}
-
-static void
-gst_hal_audio_sink_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec)
-{
- GstHalAudioSink *this = GST_HAL_AUDIO_SINK (object);
-
- GST_OBJECT_LOCK (this);
-
- switch (prop_id) {
- case PROP_UDI:
- g_value_set_string (value, this->udi);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-
- GST_OBJECT_UNLOCK (this);
-}
-
-static GstStateChangeReturn
-gst_hal_audio_sink_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstHalAudioSink *sink = GST_HAL_AUDIO_SINK (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!do_toggle_element (sink))
- return GST_STATE_CHANGE_FAILURE;
- break;
- default:
- break;
- }
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- gst_hal_audio_sink_reset (sink);
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2006 Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_HAL_AUDIO_SINK_H__
-#define __GST_HAL_AUDIO_SINK_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_HAL_AUDIO_SINK \
- (gst_hal_audio_sink_get_type ())
-#define GST_HAL_AUDIO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_HAL_AUDIO_SINK, \
- GstHalAudioSink))
-#define GST_HAL_AUDIO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_HAL_AUDIO_SINK, \
- GstHalAudioSinkClass))
-#define GST_IS_HAL_AUDIO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_HAL_AUDIO_SINK))
-#define GST_IS_HAL_AUDIO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_HAL_AUDIO_SINK))
-
-typedef struct _GstHalAudioSink {
- GstBin parent;
-
- /* explicit pointers to stuff used */
- gchar *udi;
- GstElement *kid;
- GstPad *pad;
-} GstHalAudioSink;
-
-typedef struct _GstHalAudioSinkClass {
- GstBinClass parent_class;
-} GstHalAudioSinkClass;
-
-GType gst_hal_audio_sink_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_HAL_AUDIO_SINK_H__ */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2005 Tim-Philipp Müller <tim centricular net>
- * (c) 2006 Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-/**
- * SECTION:element-halaudiosrc
- *
- * HalAudioSrc allows access to input of sound devices by specifying the
- * corresponding persistent Unique Device Id (UDI) from the Hardware Abstraction
- * Layer (HAL) in the #GstHalAudioSrc:udi property.
- * It currently always embeds alsasrc or osssrc as HAL doesn't support other
- * sound systems yet. You can also specify the UDI of a device that has ALSA or
- * OSS subdevices. If both are present ALSA is preferred.
- *
- * <refsect2>
- * <title>Examples</title>
- * |[
- * hal-find-by-property --key alsa.type --string capture
- * ]| list the UDIs of all your ALSA input devices
- * |[
- * gst-launch -v halaudiosrc udi=/org/freedesktop/Hal/devices/pci_8086_27d8_alsa_capture_0 ! autoaudiosink
- * ]| You should now hear yourself with a small delay if you have a microphone
- * connected to the specified sound device.
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include "gsthalelements.h"
-#include "gsthalaudiosrc.h"
-
-static void gst_hal_audio_src_dispose (GObject * object);
-static GstStateChangeReturn
-gst_hal_audio_src_change_state (GstElement * element,
- GstStateChange transition);
-
-enum
-{
- PROP_0,
- PROP_UDI
-};
-
-GST_BOILERPLATE (GstHalAudioSrc, gst_hal_audio_src, GstBin, GST_TYPE_BIN);
-
-static void gst_hal_audio_src_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_hal_audio_src_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-static void
-gst_hal_audio_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS_ANY);
-
- gst_element_class_add_static_pad_template (eklass, &src_template);
- gst_element_class_set_details_simple (eklass, "HAL audio source",
- "Source/Audio",
- "Audio source for sound device access via HAL",
- "Jürg Billeter <j@bitron.ch>");
-}
-
-static void
-gst_hal_audio_src_class_init (GstHalAudioSrcClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- oklass->set_property = gst_hal_audio_src_set_property;
- oklass->get_property = gst_hal_audio_src_get_property;
- oklass->dispose = gst_hal_audio_src_dispose;
- eklass->change_state = gst_hal_audio_src_change_state;
-
- g_object_class_install_property (oklass, PROP_UDI,
- g_param_spec_string ("udi",
- "UDI", "Unique Device Id", NULL,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-}
-
-/*
- * Hack to make negotiation work.
- */
-
-static void
-gst_hal_audio_src_reset (GstHalAudioSrc * src)
-{
- GstPad *targetpad;
-
- /* fakesrc */
- if (src->kid) {
- gst_element_set_state (src->kid, GST_STATE_NULL);
- gst_bin_remove (GST_BIN (src), src->kid);
- }
- src->kid = gst_element_factory_make ("fakesrc", "testsrc");
- gst_bin_add (GST_BIN (src), src->kid);
-
- targetpad = gst_element_get_static_pad (src->kid, "src");
- gst_ghost_pad_set_target (GST_GHOST_PAD (src->pad), targetpad);
- gst_object_unref (targetpad);
-}
-
-static void
-gst_hal_audio_src_init (GstHalAudioSrc * src, GstHalAudioSrcClass * g_class)
-{
- src->pad = gst_ghost_pad_new_no_target ("src", GST_PAD_SRC);
- gst_element_add_pad (GST_ELEMENT (src), src->pad);
-
- gst_hal_audio_src_reset (src);
-}
-
-static void
-gst_hal_audio_src_dispose (GObject * object)
-{
- GstHalAudioSrc *src = GST_HAL_AUDIO_SRC (object);
-
- if (src->udi) {
- g_free (src->udi);
- src->udi = NULL;
- }
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static gboolean
-do_toggle_element (GstHalAudioSrc * src)
-{
- GstPad *targetpad;
-
- /* kill old element */
- if (src->kid) {
- GST_DEBUG_OBJECT (src, "Removing old kid");
- gst_element_set_state (src->kid, GST_STATE_NULL);
- gst_bin_remove (GST_BIN (src), src->kid);
- src->kid = NULL;
- }
-
- GST_DEBUG_OBJECT (src, "Creating new kid");
- if (!src->udi)
- GST_INFO_OBJECT (src, "No UDI set for device, using default one");
-
- if (!(src->kid = gst_hal_get_audio_src (src->udi))) {
- GST_ELEMENT_ERROR (src, LIBRARY, SETTINGS, (NULL),
- ("Failed to render audio source from Hal"));
- return FALSE;
- }
- gst_element_set_state (src->kid, GST_STATE (src));
- gst_bin_add (GST_BIN (src), src->kid);
-
- /* re-attach ghostpad */
- GST_DEBUG_OBJECT (src, "Creating new ghostpad");
- targetpad = gst_element_get_static_pad (src->kid, "src");
- gst_ghost_pad_set_target (GST_GHOST_PAD (src->pad), targetpad);
- gst_object_unref (targetpad);
- GST_DEBUG_OBJECT (src, "done changing hal audio source");
-
- return TRUE;
-}
-
-static void
-gst_hal_audio_src_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstHalAudioSrc *this = GST_HAL_AUDIO_SRC (object);
-
- GST_OBJECT_LOCK (this);
-
- switch (prop_id) {
- case PROP_UDI:
- if (this->udi)
- g_free (this->udi);
- this->udi = g_value_dup_string (value);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-
- GST_OBJECT_UNLOCK (this);
-}
-
-static void
-gst_hal_audio_src_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec)
-{
- GstHalAudioSrc *this = GST_HAL_AUDIO_SRC (object);
-
- GST_OBJECT_LOCK (this);
-
- switch (prop_id) {
- case PROP_UDI:
- g_value_set_string (value, this->udi);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-
- GST_OBJECT_UNLOCK (this);
-}
-
-static GstStateChangeReturn
-gst_hal_audio_src_change_state (GstElement * element, GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstHalAudioSrc *src = GST_HAL_AUDIO_SRC (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!do_toggle_element (src))
- return GST_STATE_CHANGE_FAILURE;
- break;
- default:
- break;
- }
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- gst_hal_audio_src_reset (src);
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2005 Tim-Philipp Müller <tim centricular net>
- * (c) 2006 Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_HAL_AUDIO_SRC_H__
-#define __GST_HAL_AUDIO_SRC_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_HAL_AUDIO_SRC (gst_hal_audio_src_get_type ())
-#define GST_HAL_AUDIO_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_HAL_AUDIO_SRC, GstHalAudioSrc))
-#define GST_HAL_AUDIO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_HAL_AUDIO_SRC, GstHalAudioSrcClass))
-#define GST_IS_HAL_AUDIO_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_HAL_AUDIO_SRC))
-#define GST_IS_HAL_AUDIO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_HAL_AUDIO_SRC))
-
-typedef struct _GstHalAudioSrc {
- GstBin parent;
-
- /* explicit pointers to stuff used */
- gchar *udi;
- GstElement *kid;
- GstPad *pad;
-} GstHalAudioSrc;
-
-typedef struct _GstHalAudioSrcClass {
- GstBinClass parent_class;
-} GstHalAudioSrcClass;
-
-GType gst_hal_audio_src_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_HAL_AUDIO_SRC_H__ */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2006 Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <gst/gst.h>
-
-#include "gsthalelements.h"
-
-#include "gsthalaudiosink.h"
-#include "gsthalaudiosrc.h"
-
-GST_DEBUG_CATEGORY (hal_debug);
-
-static gboolean
-plugin_init (GstPlugin * plugin)
-{
- GST_DEBUG_CATEGORY_INIT (hal_debug, "hal", 0,
- "HAL/GStreamer audio input/output wrapper elements");
-
- if (!gst_element_register (plugin, "halaudiosink",
- GST_RANK_NONE, GST_TYPE_HAL_AUDIO_SINK) ||
- !gst_element_register (plugin, "halaudiosrc",
- GST_RANK_NONE, GST_TYPE_HAL_AUDIO_SRC)) {
- return FALSE;
- }
-
- return TRUE;
-}
-
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
- GST_VERSION_MINOR,
- "halelements",
- "elements wrapping the GStreamer/HAL audio input/output devices",
- plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2006 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_HAL_ELEMENTS_H__
-#define __GST_HAL_ELEMENTS_H__
-
-#include <hal.h>
-
-GST_DEBUG_CATEGORY_EXTERN (hal_debug);
-#define GST_CAT_DEFAULT hal_debug
-
-#endif /* __GST_HAL_ELEMENTS_H__ */
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2002> Thomas Vander Stichele <thomas@apestaart.org>
- * Copyright (C) <2006> Jürg Billeter <j@bitron.ch>
- * Copyright (C) <2007> Sebastian Dröge <slomo@circular-chaos.org>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-/*
- * this library handles interaction with Hal
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-#include <glib.h>
-#include "hal.h"
-
-GST_DEBUG_CATEGORY_EXTERN (hal_debug);
-
-#define GST_CAT_DEFAULT hal_debug
-
-/* compat for older libhal */
-#ifndef LIBHAL_FREE_DBUS_ERROR
-#define LIBHAL_FREE_DBUS_ERROR(e) dbus_error_free (e)
-#endif
-
-/*
- * gst_hal_get_alsa_element:
- * @ctx: a #LibHalContext which should be used for querying HAL.
- * @udi: a #gchar corresponding to the UDI you want to get.
- * @device_type: a #GstHalDeviceType specifying the wanted device type.
- *
- * Get Hal UDI @udi's string value.
- *
- * Returns: a newly allocated #gchar string containing the appropriate pipeline
- * for UDI @udi, or NULL in the case of an error..
- */
-static gchar *
-gst_hal_get_alsa_element (LibHalContext * ctx, const gchar * udi,
- GstHalDeviceType device_type)
-{
- char *type, *string = NULL;
- const char *element = NULL;
- DBusError error;
-
- dbus_error_init (&error);
-
- if (!libhal_device_query_capability (ctx, udi, "alsa", &error)) {
- if (dbus_error_is_set (&error)) {
- GST_DEBUG ("Failed querying %s for alsa capability: %s: %s",
- udi, error.name, error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- } else {
- GST_DEBUG ("UDI %s has no alsa capability", udi);
- }
- return NULL;
- }
-
- type = libhal_device_get_property_string (ctx, udi, "alsa.type", &error);
-
- if (dbus_error_is_set (&error)) {
- GST_DEBUG ("UDI %s has alsa capabilities but no alsa.type property: %s, %s",
- udi, error.name, error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- return NULL;
- } else if (!type) {
- GST_DEBUG ("UDI %s has empty alsa.type property", udi);
- return NULL;
- }
-
- if (strcmp (type, "playback") == 0 && device_type == GST_HAL_AUDIOSINK)
- element = "alsasink";
- else if (strcmp (type, "capture") == 0 && device_type == GST_HAL_AUDIOSRC)
- element = "alsasrc";
-
- libhal_free_string (type);
-
- if (element) {
- int card, device;
-
- card = libhal_device_get_property_int (ctx, udi, "alsa.card", &error);
- if (dbus_error_is_set (&error)) {
- GST_DEBUG ("UDI %s has no alsa.card property: %s: %s", udi, error.name,
- error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- return NULL;
- } else if (card == -1) {
- GST_DEBUG ("UDI %s has no alsa.card property", udi);
- return NULL;
- }
-
- device = libhal_device_get_property_int (ctx, udi, "alsa.device", &error);
- if (dbus_error_is_set (&error)) {
- GST_DEBUG ("UDI %s has no alsa.device property: %s: %s", udi, error.name,
- error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- return NULL;
- } else if (device == -1) {
- GST_DEBUG ("UDI %s has no alsa.device property", udi);
- return NULL;
- }
-
- /* This is a bit dodgy, since it makes lots of assumptions about the way
- * alsa is set up. In any case, only munge the device string for playback */
- if (strcmp (element, "alsasink") == 0 && device == 0) {
- /* handle default device specially to use
- * dmix, dsnoop, and softvol if appropriate */
- string = g_strdup_printf ("%s device=default:%d", element, card);
- } else {
- string =
- g_strdup_printf ("%s device=plughw:%d,%d", element, card, device);
- }
- }
-
- return string;
-}
-
-/*
- * gst_hal_get_oss_element:
- * @ctx: a #LibHalContext which should be used for querying HAL.
- * @udi: a #gchar corresponding to the UDI you want to get.
- * @device_type: a #GstHalDeviceType specifying the wanted device type.
- *
- * Get Hal UDI @udi's string value.
- *
- * Returns: a newly allocated #gchar string containing the appropriate pipeline
- * for UDI @udi, or NULL in the case of an error..
- */
-static gchar *
-gst_hal_get_oss_element (LibHalContext * ctx, const gchar * udi,
- GstHalDeviceType device_type)
-{
- char *type, *string = NULL;
- const char *element = NULL;
- DBusError error;
-
- dbus_error_init (&error);
-
- if (!libhal_device_query_capability (ctx, udi, "oss", &error)) {
- if (dbus_error_is_set (&error)) {
- GST_DEBUG ("Failed querying %s for oss capability: %s: %s", udi,
- error.name, error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- } else {
- GST_DEBUG ("UDI %s has no oss capability", udi);
- }
- return NULL;
- }
-
- type = libhal_device_get_property_string (ctx, udi, "oss.type", &error);
- if (dbus_error_is_set (&error)) {
- GST_DEBUG ("UDI %s has oss capabilities but no oss.type property: %s, %s",
- udi, error.name, error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- return NULL;
- } else if (!type) {
- GST_DEBUG ("UDI %s has empty oss.type property", udi);
- return NULL;
- }
-
- if (strcmp (type, "pcm") == 0) {
- if (device_type == GST_HAL_AUDIOSINK)
- element = "osssink";
- else if (device_type == GST_HAL_AUDIOSRC)
- element = "osssrc";
- }
- libhal_free_string (type);
-
- if (element) {
- char *device = NULL;
-
- device =
- libhal_device_get_property_string (ctx, udi, "oss.device_file", &error);
- if (dbus_error_is_set (&error)) {
- GST_DEBUG
- ("UDI %s has oss capabilities but no oss.device_file property: %s, %s",
- udi, error.name, error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- return NULL;
- } else if (!device) {
- GST_DEBUG ("UDI %s has empty oss.device_file property", udi);
- return NULL;
- }
-
- string = g_strdup_printf ("%s device=%s", element, device);
- libhal_free_string (device);
- }
-
- return string;
-}
-
-/*
- * gst_hal_get_string:
- * @udi: a #gchar corresponding to the UDI you want to get.
- * @device_type: a #GstHalDeviceType specifying the wanted device type.
- *
- * Get Hal UDI @udi's string value.
- *
- * Returns: a newly allocated #gchar string containing the appropriate pipeline
- * for UDI @udi, or NULL in the case of an error..
- */
-static gchar *
-gst_hal_get_string (const gchar * udi, GstHalDeviceType device_type)
-{
- DBusError error;
- LibHalContext *ctx;
- char *string = NULL;
-
- /* Don't query HAL for NULL UDIs. Passing NULL as UDI to HAL gives
- * an assertion failure in D-Bus when running with
- * DBUS_FATAL_WARNINGS=1. */
- if (!udi)
- return NULL;
-
- dbus_error_init (&error);
-
- ctx = libhal_ctx_new ();
- /* Should only happen on OOM */
- g_return_val_if_fail (ctx != NULL, NULL);
-
- if (!libhal_ctx_set_dbus_connection (ctx, dbus_bus_get (DBUS_BUS_SYSTEM,
- &error))) {
- GST_DEBUG ("Unable to set DBus connection: %s: %s", error.name,
- error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- goto ctx_free;
- }
-
- if (!libhal_ctx_init (ctx, &error)) {
- GST_DEBUG ("Unable to set init HAL context: %s: %s", error.name,
- error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- goto ctx_free;
- }
-
- /* Now first check if UDI is an alsa device, then oss and then
- * check the childs of the given device. If there are alsa and oss
- * children the first alsa one is used. */
-
- string = gst_hal_get_alsa_element (ctx, udi, device_type);
-
- if (!string)
- string = gst_hal_get_oss_element (ctx, udi, device_type);
-
- if (!string) {
- int num_childs;
- char **childs = NULL;
-
- /* now try if one of the direct subdevices supports ALSA or OSS */
- childs =
- libhal_manager_find_device_string_match (ctx, "info.parent", udi,
- &num_childs, &error);
- if (dbus_error_is_set (&error)) {
- GST_DEBUG ("Unable to retrieve childs of %s: %s: %s", udi, error.name,
- error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- goto ctx_shutdown;
- }
-
- if (childs && num_childs > 0) {
- int i;
- char *alsa_string = NULL, *oss_string = NULL;
-
- for (i = 0; i < num_childs && !alsa_string; i++) {
- alsa_string = gst_hal_get_alsa_element (ctx, childs[i], device_type);
-
- if (!oss_string)
- oss_string = gst_hal_get_oss_element (ctx, childs[i], device_type);
- }
-
- if (alsa_string) {
- string = alsa_string;
- g_free (oss_string);
- } else if (oss_string) {
- string = oss_string;
- }
- }
- libhal_free_string_array (childs);
- }
-
-ctx_shutdown:
- if (!libhal_ctx_shutdown (ctx, &error)) {
- GST_DEBUG ("Closing connection to HAL failed: %s: %s", error.name,
- error.message);
- LIBHAL_FREE_DBUS_ERROR (&error);
- }
-
-ctx_free:
- libhal_ctx_free (ctx);
-
- if (string == NULL) {
- GST_WARNING ("Problem finding a HAL audio device for udi %s", udi);
- } else {
- GST_INFO ("Using %s", string);
- }
-
- return string;
-}
-
-/* external functions */
-
-/**
- * gst_hal_render_bin_from_udi:
- * @udi: a #gchar string corresponding to a Hal UDI.
- *
- * Render bin from Hal UDI @udi.
- *
- * Returns: a #GstElement containing the rendered bin.
- */
-GstElement *
-gst_hal_render_bin_from_udi (const gchar * udi, GstHalDeviceType type)
-{
- GstElement *bin = NULL;
- gchar *value;
-
- value = gst_hal_get_string (udi, type);
- if (value)
- bin = gst_parse_bin_from_description (value, TRUE, NULL);
- g_free (value);
- return bin;
-}
-
-/**
- * gst_hal_get_audio_sink:
- * @udi: a #gchar string corresponding to a Hal UDI.
- *
- * Render audio output bin from GStreamer Hal UDI.
- * If no device with the specified UDI exists or @udi is NULL,
- * the default audio sink for the platform is used
- * (typically alsasink, osssink or sunaudiosink).
- *
- * Returns: a #GstElement containing the audio output bin, or NULL if
- * everything failed.
- */
-GstElement *
-gst_hal_get_audio_sink (const gchar * udi)
-{
- GstElement *ret = NULL;
-
- if (udi)
- ret = gst_hal_render_bin_from_udi (udi, GST_HAL_AUDIOSINK);
-
- if (!ret) {
- ret = gst_element_factory_make (DEFAULT_AUDIOSINK, NULL);
-
- if (!ret)
- GST_ERROR ("Hal audio sink and %s don't work", DEFAULT_AUDIOSINK);
- }
-
- return ret;
-}
-
-/**
- * gst_hal_get_audio_src:
- * @udi: a #gchar string corresponding to a Hal UDI.
- *
- * Render audio acquisition bin from GStreamer Hal UDI.
- * If no device with the specified UDI exists or @udi is NULL,
- * the default audio source for the plaform is used
- * (typically alsasrc, osssrc or sunaudiosrc).
- *
- * Returns: a #GstElement containing the audio source bin, or NULL if
- * everything failed.
- */
-GstElement *
-gst_hal_get_audio_src (const gchar * udi)
-{
- GstElement *ret = NULL;
-
- if (udi)
- ret = gst_hal_render_bin_from_udi (udi, GST_HAL_AUDIOSRC);
-
- if (!ret) {
- ret = gst_element_factory_make (DEFAULT_AUDIOSRC, NULL);
-
- if (!ret)
- GST_ERROR ("Hal audio src and %s don't work", DEFAULT_AUDIOSRC);
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2002> Thomas Vander Stichele <thomas@apestaart.org>
- * Copyright (C) <2006> Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef GST_HAL_H
-#define GST_HAL_H
-
-/*
- * this library handles interaction with Hal
- */
-
-#include <gst/gst.h>
-#include <dbus/dbus.h>
-#include <libhal.h>
-
-G_BEGIN_DECLS
-
-typedef enum
-{
- GST_HAL_AUDIOSINK,
- GST_HAL_AUDIOSRC
-} GstHalDeviceType;
-
-GstElement *gst_hal_render_bin_from_udi (const gchar * udi,
- GstHalDeviceType type);
-
-GstElement *gst_hal_get_audio_sink (const gchar * udi);
-GstElement *gst_hal_get_audio_src (const gchar * udi);
-
-G_END_DECLS
-
-#endif /* GST_HAL_H */
#define GST_TYPE_JACK_TRANSPORT (gst_jack_transport_get_type ())
#define GST_TYPE_JACK_CLIENT (gst_jack_client_get_type ())
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define GST_JACK_FORMAT_STR "F32LE"
+#else
+#define GST_JACK_FORMAT_STR "F32BE"
+#endif
+
GType gst_jack_client_get_type(void);
GType gst_jack_connect_get_type(void);
GType gst_jack_transport_get_type(void);
typedef struct
{
gint refcount;
- GMutex *lock;
- GCond *flush_cond;
+ GMutex lock;
+ GCond flush_cond;
/* id/server pair and the connection */
gchar *id;
g_list_length (conn->src_clients), g_list_length (conn->sink_clients));
}
- g_mutex_lock (conn->lock);
+ g_mutex_lock (&conn->lock);
/* call sources first, then sinks. Sources will either push data into the
* ringbuffer of the sinks, which will then pull the data out of it, or
* sinks will pull the data from the sources. */
res = client->process (nframes, client->user_data);
if (client->deactivate) {
client->deactivate = FALSE;
- g_cond_signal (conn->flush_cond);
+ g_cond_signal (&conn->flush_cond);
}
}
}
res = client->process (nframes, client->user_data);
if (client->deactivate) {
client->deactivate = FALSE;
- g_cond_signal (conn->flush_cond);
+ g_cond_signal (&conn->flush_cond);
}
}
}
}
}
}
- g_mutex_unlock (conn->lock);
+ g_mutex_unlock (&conn->lock);
+
return res;
}
GST_DEBUG ("disconnect client %s from server %s", conn->id,
GST_STR_NULL (conn->server));
- g_mutex_lock (conn->lock);
+ g_mutex_lock (&conn->lock);
for (walk = conn->src_clients; walk; walk = g_list_next (walk)) {
GstJackAudioClient *client = (GstJackAudioClient *) walk->data;
if (client->shutdown)
client->shutdown (client->user_data);
}
- g_mutex_unlock (conn->lock);
+ g_mutex_unlock (&conn->lock);
}
typedef struct
/* now create object */
conn = g_new (GstJackAudioConnection, 1);
conn->refcount = 1;
- conn->lock = g_mutex_new ();
- conn->flush_cond = g_cond_new ();
+ g_mutex_init (&conn->lock);
+ g_cond_init (&conn->flush_cond);
conn->id = g_strdup (id);
conn->server = g_strdup (server);
conn->client = jclient;
{
GST_ERROR ("Could not activate client (%d)", res);
*status = JackFailure;
- g_mutex_free (conn->lock);
+ g_mutex_clear (&conn->lock);
g_free (conn->id);
g_free (conn->server);
g_free (conn);
}
/* free resources */
- g_mutex_free (conn->lock);
- g_cond_free (conn->flush_cond);
+ g_mutex_clear (&conn->lock);
+ g_cond_clear (&conn->flush_cond);
g_free (conn->id);
g_free (conn->server);
g_free (conn);
gst_jack_audio_connection_add_client (GstJackAudioConnection * conn,
GstJackAudioClient * client)
{
- g_mutex_lock (conn->lock);
+ g_mutex_lock (&conn->lock);
switch (client->type) {
case GST_JACK_CLIENT_SOURCE:
conn->src_clients = g_list_append (conn->src_clients, client);
g_warning ("trying to add unknown client type");
break;
}
- g_mutex_unlock (conn->lock);
+ g_mutex_unlock (&conn->lock);
}
static void
gst_jack_audio_connection_remove_client (GstJackAudioConnection * conn,
GstJackAudioClient * client)
{
- g_mutex_lock (conn->lock);
+ g_mutex_lock (&conn->lock);
switch (client->type) {
case GST_JACK_CLIENT_SOURCE:
conn->src_clients = g_list_remove (conn->src_clients, client);
g_warning ("trying to remove unknown client type");
break;
}
- g_mutex_unlock (conn->lock);
+ g_mutex_unlock (&conn->lock);
}
/**
g_return_val_if_fail (client != NULL, -1);
/* make sure that we are not dispatching the client */
- g_mutex_lock (client->conn->lock);
+ g_mutex_lock (&client->conn->lock);
if (client->active && !active) {
/* we need to process once more to flush the port */
client->deactivate = TRUE;
/* need to wait for process_cb run once more */
while (client->deactivate)
- g_cond_wait (client->conn->flush_cond, client->conn->lock);
+ g_cond_wait (&client->conn->flush_cond, &client->conn->lock);
}
client->active = active;
- g_mutex_unlock (client->conn->lock);
+ g_mutex_unlock (&client->conn->lock);
return 0;
}
/**
* SECTION:element-jackaudiosink
- * @see_also: #GstBaseAudioSink, #GstRingBuffer
+ * @see_also: #GstAudioBaseSink, #GstAudioRingBuffer
*
* A Sink that outputs data to Jack ports.
*
(GInstanceInitFunc) gst_jack_ring_buffer_init,
NULL
};
- GType tmp = g_type_register_static (GST_TYPE_RING_BUFFER,
+ GType tmp = g_type_register_static (GST_TYPE_AUDIO_RING_BUFFER,
"GstJackAudioSinkRingBuffer", &ringbuffer_info, 0);
g_once_init_leave (&ringbuffer_type, tmp);
}
static void
gst_jack_ring_buffer_class_init (GstJackRingBufferClass * klass)
{
- GstRingBufferClass *gstringbuffer_class;
+ GstAudioRingBufferClass *gstringbuffer_class;
- gstringbuffer_class = (GstRingBufferClass *) klass;
+ gstringbuffer_class = (GstAudioRingBufferClass *) klass;
ring_parent_class = g_type_class_peek_parent (klass);
jack_process_cb (jack_nframes_t nframes, void *arg)
{
GstJackAudioSink *sink;
- GstRingBuffer *buf;
+ GstAudioRingBuffer *buf;
gint readseg, len;
guint8 *readptr;
gint i, j, flen, channels;
sample_t *data;
- buf = GST_RING_BUFFER_CAST (arg);
+ buf = GST_AUDIO_RING_BUFFER_CAST (arg);
sink = GST_JACK_AUDIO_SINK (GST_OBJECT_PARENT (buf));
- channels = buf->spec.channels;
+ channels = GST_AUDIO_INFO_CHANNELS (&buf->spec.info);
/* get target buffers */
for (i = 0; i < channels; i++) {
(sample_t *) jack_port_get_buffer (sink->ports[i], nframes);
}
- if (gst_ring_buffer_prepare_read (buf, &readseg, &readptr, &len)) {
+ if (gst_audio_ring_buffer_prepare_read (buf, &readseg, &readptr, &len)) {
flen = len / channels;
/* the number of samples must be exactly the segment size */
}
/* clear written samples in the ringbuffer */
- gst_ring_buffer_clear (buf, readseg);
+ gst_audio_ring_buffer_clear (buf, readseg);
/* we wrote one segment */
- gst_ring_buffer_advance (buf, 1);
+ gst_audio_ring_buffer_advance (buf, 1);
} else {
GST_DEBUG_OBJECT (sink, "write %d frames silence", nframes);
/* We are not allowed to read from the ringbuffer, write silence to all
/* the _open_device method should make a connection with the server
*/
static gboolean
-gst_jack_ring_buffer_open_device (GstRingBuffer * buf)
+gst_jack_ring_buffer_open_device (GstAudioRingBuffer * buf)
{
GstJackAudioSink *sink;
jack_status_t status = 0;
/* close the connection with the server
*/
static gboolean
-gst_jack_ring_buffer_close_device (GstRingBuffer * buf)
+gst_jack_ring_buffer_close_device (GstAudioRingBuffer * buf)
{
GstJackAudioSink *sink;
* received for some reason, we fail here.
*/
static gboolean
-gst_jack_ring_buffer_acquire (GstRingBuffer * buf, GstRingBufferSpec * spec)
+gst_jack_ring_buffer_acquire (GstAudioRingBuffer * buf,
+ GstAudioRingBufferSpec * spec)
{
GstJackAudioSink *sink;
GstJackRingBuffer *abuf;
const char **ports;
gint sample_rate, buffer_size;
- gint i, channels, res;
+ gint i, rate, bpf, channels, res;
jack_client_t *client;
sink = GST_JACK_AUDIO_SINK (GST_OBJECT_PARENT (buf));
client = gst_jack_audio_client_get_client (sink->client);
+ rate = GST_AUDIO_INFO_RATE (&spec->info);
+
/* sample rate must be that of the server */
sample_rate = jack_get_sample_rate (client);
- if (sample_rate != spec->rate)
+ if (sample_rate != rate)
goto wrong_samplerate;
- channels = spec->channels;
+ channels = GST_AUDIO_INFO_CHANNELS (&spec->info);
+ bpf = GST_AUDIO_INFO_BPF (&spec->info);
if (!gst_jack_audio_sink_allocate_channels (sink, channels))
goto out_of_ports;
* for all channels */
spec->segsize = buffer_size * sizeof (gfloat) * channels;
spec->latency_time = gst_util_uint64_scale (spec->segsize,
- (GST_SECOND / GST_USECOND), spec->rate * spec->bytes_per_sample);
+ (GST_SECOND / GST_USECOND), rate * bpf);
/* segtotal based on buffer-time latency */
spec->segtotal = spec->buffer_time / spec->latency_time;
if (spec->segtotal < 2) {
buffer_size, spec->segsize, spec->segtotal);
/* allocate the ringbuffer memory now */
- buf->data = gst_buffer_new_and_alloc (spec->segtotal * spec->segsize);
- memset (GST_BUFFER_DATA (buf->data), 0, GST_BUFFER_SIZE (buf->data));
+ buf->size = spec->segtotal * spec->segsize;
+ buf->memory = g_malloc0 (buf->size);
if ((res = gst_jack_audio_client_set_active (sink->client, TRUE)))
goto could_not_activate;
abuf->sample_rate = sample_rate;
abuf->buffer_size = buffer_size;
- abuf->channels = spec->channels;
+ abuf->channels = channels;
return TRUE;
{
GST_ELEMENT_ERROR (sink, RESOURCE, SETTINGS, (NULL),
("Wrong samplerate, server is running at %d and we received %d",
- sample_rate, spec->rate));
+ sample_rate, rate));
return FALSE;
}
out_of_ports:
/* function is called with LOCK */
static gboolean
-gst_jack_ring_buffer_release (GstRingBuffer * buf)
+gst_jack_ring_buffer_release (GstAudioRingBuffer * buf)
{
GstJackAudioSink *sink;
GstJackRingBuffer *abuf;
abuf->sample_rate = -1;
/* free the buffer */
- gst_buffer_unref (buf->data);
- buf->data = NULL;
+ g_free (buf->memory);
+ buf->memory = NULL;
return TRUE;
}
static gboolean
-gst_jack_ring_buffer_start (GstRingBuffer * buf)
+gst_jack_ring_buffer_start (GstAudioRingBuffer * buf)
{
GstJackAudioSink *sink;
}
static gboolean
-gst_jack_ring_buffer_pause (GstRingBuffer * buf)
+gst_jack_ring_buffer_pause (GstAudioRingBuffer * buf)
{
GstJackAudioSink *sink;
}
static gboolean
-gst_jack_ring_buffer_stop (GstRingBuffer * buf)
+gst_jack_ring_buffer_stop (GstAudioRingBuffer * buf)
{
GstJackAudioSink *sink;
#if defined (HAVE_JACK_0_120_1) || defined(HAVE_JACK_1_9_7)
static guint
-gst_jack_ring_buffer_delay (GstRingBuffer * buf)
+gst_jack_ring_buffer_delay (GstAudioRingBuffer * buf)
{
GstJackAudioSink *sink;
guint i, res = 0;
}
#else /* !(defined (HAVE_JACK_0_120_1) || defined(HAVE_JACK_1_9_7)) */
static guint
-gst_jack_ring_buffer_delay (GstRingBuffer * buf)
+gst_jack_ring_buffer_delay (GstAudioRingBuffer * buf)
{
GstJackAudioSink *sink;
guint i, res = 0;
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 32, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_JACK_FORMAT_STR ", "
+ "layout = (string) interleaved, "
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
);
PROP_LAST
};
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_jack_audio_sink_debug, "jacksink", 0, "jacksink element");
-
-GST_BOILERPLATE_FULL (GstJackAudioSink, gst_jack_audio_sink, GstBaseAudioSink,
- GST_TYPE_BASE_AUDIO_SINK, _do_init);
+#define gst_jack_audio_sink_parent_class parent_class
+G_DEFINE_TYPE (GstJackAudioSink, gst_jack_audio_sink, GST_TYPE_AUDIO_BASE_SINK);
static void gst_jack_audio_sink_dispose (GObject * object);
static void gst_jack_audio_sink_set_property (GObject * object, guint prop_id,
static void gst_jack_audio_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstCaps *gst_jack_audio_sink_getcaps (GstBaseSink * bsink);
-static GstRingBuffer *gst_jack_audio_sink_create_ringbuffer (GstBaseAudioSink *
- sink);
-
-static void
-gst_jack_audio_sink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Audio Sink (Jack)",
- "Sink/Audio", "Output audio to a JACK server",
- "Wim Taymans <wim.taymans@gmail.com>");
-
- gst_element_class_add_static_pad_template (element_class,
- &jackaudiosink_sink_factory);
-}
+static GstCaps *gst_jack_audio_sink_getcaps (GstBaseSink * bsink,
+ GstCaps * filter);
+static GstAudioRingBuffer
+ * gst_jack_audio_sink_create_ringbuffer (GstAudioBaseSink * sink);
static void
gst_jack_audio_sink_class_init (GstJackAudioSinkClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseSinkClass *gstbasesink_class;
- GstBaseAudioSinkClass *gstbaseaudiosink_class;
+ GstAudioBaseSinkClass *gstaudiobasesink_class;
+
+ GST_DEBUG_CATEGORY_INIT (gst_jack_audio_sink_debug, "jacksink", 0,
+ "jacksink element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasesink_class = (GstBaseSinkClass *) klass;
- gstbaseaudiosink_class = (GstBaseAudioSinkClass *) klass;
+ gstaudiobasesink_class = (GstAudioBaseSinkClass *) klass;
gobject_class->dispose = gst_jack_audio_sink_dispose;
gobject_class->get_property = gst_jack_audio_sink_get_property;
GST_TYPE_JACK_TRANSPORT, DEFAULT_PROP_TRANSPORT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Audio Sink (Jack)",
+ "Sink/Audio", "Output audio to a JACK server",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&jackaudiosink_sink_factory));
+
gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_jack_audio_sink_getcaps);
- gstbaseaudiosink_class->create_ringbuffer =
+ gstaudiobasesink_class->create_ringbuffer =
GST_DEBUG_FUNCPTR (gst_jack_audio_sink_create_ringbuffer);
/* ref class from a thread-safe context to work around missing bit of
}
static void
-gst_jack_audio_sink_init (GstJackAudioSink * sink,
- GstJackAudioSinkClass * g_class)
+gst_jack_audio_sink_init (GstJackAudioSink * sink)
{
sink->connect = DEFAULT_PROP_CONNECT;
sink->server = g_strdup (DEFAULT_PROP_SERVER);
}
static GstCaps *
-gst_jack_audio_sink_getcaps (GstBaseSink * bsink)
+gst_jack_audio_sink_getcaps (GstBaseSink * bsink, GstCaps * filter)
{
GstJackAudioSink *sink = GST_JACK_AUDIO_SINK (bsink);
const char **ports;
GST_DEBUG_OBJECT (sink, "got %d-%d ports, samplerate: %d", min, max, rate);
if (!sink->caps) {
- sink->caps = gst_caps_new_simple ("audio/x-raw-float",
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "width", G_TYPE_INT, 32,
+ sink->caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_JACK_FORMAT_STR,
+ "layout", G_TYPE_STRING, "interleaved",
"rate", G_TYPE_INT, rate,
"channels", GST_TYPE_INT_RANGE, min, max, NULL);
}
}
}
-static GstRingBuffer *
-gst_jack_audio_sink_create_ringbuffer (GstBaseAudioSink * sink)
+static GstAudioRingBuffer *
+gst_jack_audio_sink_create_ringbuffer (GstAudioBaseSink * sink)
{
- GstRingBuffer *buffer;
+ GstAudioRingBuffer *buffer;
buffer = g_object_new (GST_TYPE_JACK_RING_BUFFER, NULL);
GST_DEBUG_OBJECT (sink, "created ringbuffer @%p", buffer);
#include <jack/jack.h>
#include <gst/gst.h>
-#include <gst/audio/gstbaseaudiosink.h>
+#include <gst/audio/gstaudiobasesink.h>
#include "gstjack.h"
#include "gstjackaudioclient.h"
* Opaque #GstJackAudioSink.
*/
struct _GstJackAudioSink {
- GstBaseAudioSink element;
+ GstAudioBaseSink element;
/*< private >*/
/* cached caps */
};
struct _GstJackAudioSinkClass {
- GstBaseAudioSinkClass parent_class;
+ GstAudioBaseSinkClass parent_class;
};
GType gst_jack_audio_sink_get_type (void);
/**
* SECTION:element-jackaudiosrc
- * @see_also: #GstBaseAudioSrc, #GstRingBuffer
+ * @see_also: #GstAudioBaseSrc, #GstAudioRingBuffer
*
* A Src that inputs data from Jack ports.
*
(GInstanceInitFunc) gst_jack_ring_buffer_init,
NULL
};
- GType tmp = g_type_register_static (GST_TYPE_RING_BUFFER,
+ GType tmp = g_type_register_static (GST_TYPE_AUDIO_RING_BUFFER,
"GstJackAudioSrcRingBuffer", &ringbuffer_info, 0);
g_once_init_leave (&ringbuffer_type, tmp);
}
static void
gst_jack_ring_buffer_class_init (GstJackRingBufferClass * klass)
{
- GstRingBufferClass *gstringbuffer_class;
+ GstAudioRingBufferClass *gstringbuffer_class;
- gstringbuffer_class = (GstRingBufferClass *) klass;
+ gstringbuffer_class = (GstAudioRingBufferClass *) klass;
ring_parent_class = g_type_class_peek_parent (klass);
jack_process_cb (jack_nframes_t nframes, void *arg)
{
GstJackAudioSrc *src;
- GstRingBuffer *buf;
+ GstAudioRingBuffer *buf;
gint len;
guint8 *writeptr;
gint writeseg;
gint channels, i, j, flen;
sample_t *data;
- buf = GST_RING_BUFFER_CAST (arg);
+ buf = GST_AUDIO_RING_BUFFER_CAST (arg);
src = GST_JACK_AUDIO_SRC (GST_OBJECT_PARENT (buf));
- channels = buf->spec.channels;
+ channels = GST_AUDIO_INFO_CHANNELS (&buf->spec.info);
/* get input buffers */
for (i = 0; i < channels; i++)
src->buffers[i] =
(sample_t *) jack_port_get_buffer (src->ports[i], nframes);
- if (gst_ring_buffer_prepare_read (buf, &writeseg, &writeptr, &len)) {
+ if (gst_audio_ring_buffer_prepare_read (buf, &writeseg, &writeptr, &len)) {
flen = len / channels;
/* the number of samples must be exactly the segment size */
len / channels, channels);
/* we wrote one segment */
- gst_ring_buffer_advance (buf, 1);
+ gst_audio_ring_buffer_advance (buf, 1);
}
return 0;
/* the _open_device method should make a connection with the server
*/
static gboolean
-gst_jack_ring_buffer_open_device (GstRingBuffer * buf)
+gst_jack_ring_buffer_open_device (GstAudioRingBuffer * buf)
{
GstJackAudioSrc *src;
jack_status_t status = 0;
/* close the connection with the server
*/
static gboolean
-gst_jack_ring_buffer_close_device (GstRingBuffer * buf)
+gst_jack_ring_buffer_close_device (GstAudioRingBuffer * buf)
{
GstJackAudioSrc *src;
* received for some reason, we fail here.
*/
static gboolean
-gst_jack_ring_buffer_acquire (GstRingBuffer * buf, GstRingBufferSpec * spec)
+gst_jack_ring_buffer_acquire (GstAudioRingBuffer * buf,
+ GstAudioRingBufferSpec * spec)
{
GstJackAudioSrc *src;
GstJackRingBuffer *abuf;
const char **ports;
gint sample_rate, buffer_size;
- gint i, channels, res;
+ gint i, bpf, rate, channels, res;
jack_client_t *client;
src = GST_JACK_AUDIO_SRC (GST_OBJECT_PARENT (buf));
client = gst_jack_audio_client_get_client (src->client);
+ rate = GST_AUDIO_INFO_RATE (&spec->info);
+
/* sample rate must be that of the server */
sample_rate = jack_get_sample_rate (client);
- if (sample_rate != spec->rate)
+ if (sample_rate != rate)
goto wrong_samplerate;
- channels = spec->channels;
+ bpf = GST_AUDIO_INFO_BPF (&spec->info);
+ channels = GST_AUDIO_INFO_CHANNELS (&spec->info);
if (!gst_jack_audio_src_allocate_channels (src, channels))
goto out_of_ports;
- gst_jack_set_layout_on_caps (&spec->caps, channels);
+ gst_jack_set_layout (buf, spec);
buffer_size = jack_get_buffer_size (client);
* for all channels */
spec->segsize = buffer_size * sizeof (gfloat) * channels;
spec->latency_time = gst_util_uint64_scale (spec->segsize,
- (GST_SECOND / GST_USECOND), spec->rate * spec->bytes_per_sample);
+ (GST_SECOND / GST_USECOND), rate * bpf);
/* segtotal based on buffer-time latency */
spec->segtotal = spec->buffer_time / spec->latency_time;
if (spec->segtotal < 2) {
buffer_size, spec->segsize, spec->segtotal);
/* allocate the ringbuffer memory now */
- buf->data = gst_buffer_new_and_alloc (spec->segtotal * spec->segsize);
- memset (GST_BUFFER_DATA (buf->data), 0, GST_BUFFER_SIZE (buf->data));
+ buf->size = spec->segtotal * spec->segsize;
+ buf->memory = g_malloc0 (buf->size);
if ((res = gst_jack_audio_client_set_active (src->client, TRUE)))
goto could_not_activate;
abuf->sample_rate = sample_rate;
abuf->buffer_size = buffer_size;
- abuf->channels = spec->channels;
+ abuf->channels = channels;
return TRUE;
{
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
("Wrong samplerate, server is running at %d and we received %d",
- sample_rate, spec->rate));
+ sample_rate, rate));
return FALSE;
}
out_of_ports:
/* function is called with LOCK */
static gboolean
-gst_jack_ring_buffer_release (GstRingBuffer * buf)
+gst_jack_ring_buffer_release (GstAudioRingBuffer * buf)
{
GstJackAudioSrc *src;
GstJackRingBuffer *abuf;
abuf->sample_rate = -1;
/* free the buffer */
- gst_buffer_unref (buf->data);
- buf->data = NULL;
+ g_free (buf->memory);
+ buf->memory = NULL;
return TRUE;
}
static gboolean
-gst_jack_ring_buffer_start (GstRingBuffer * buf)
+gst_jack_ring_buffer_start (GstAudioRingBuffer * buf)
{
GstJackAudioSrc *src;
}
static gboolean
-gst_jack_ring_buffer_pause (GstRingBuffer * buf)
+gst_jack_ring_buffer_pause (GstAudioRingBuffer * buf)
{
GstJackAudioSrc *src;
}
static gboolean
-gst_jack_ring_buffer_stop (GstRingBuffer * buf)
+gst_jack_ring_buffer_stop (GstAudioRingBuffer * buf)
{
GstJackAudioSrc *src;
#if defined (HAVE_JACK_0_120_1) || defined(HAVE_JACK_1_9_7)
static guint
-gst_jack_ring_buffer_delay (GstRingBuffer * buf)
+gst_jack_ring_buffer_delay (GstAudioRingBuffer * buf)
{
GstJackAudioSrc *src;
guint i, res = 0;
}
#else /* !(defined (HAVE_JACK_0_120_1) || defined(HAVE_JACK_1_9_7)) */
static guint
-gst_jack_ring_buffer_delay (GstRingBuffer * buf)
+gst_jack_ring_buffer_delay (GstAudioRingBuffer * buf)
{
GstJackAudioSrc *src;
guint i, res = 0;
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 32, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_JACK_FORMAT_STR ", "
+ "layout = (string) interleaved, "
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
);
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT(gst_jack_audio_src_debug, "jacksrc", 0, "jacksrc element");
-
-GST_BOILERPLATE_FULL (GstJackAudioSrc, gst_jack_audio_src, GstBaseAudioSrc,
- GST_TYPE_BASE_AUDIO_SRC, _do_init);
+#define gst_jack_audio_src_parent_class parent_class
+G_DEFINE_TYPE (GstJackAudioSrc, gst_jack_audio_src, GST_TYPE_AUDIO_BASE_SRC);
static void gst_jack_audio_src_dispose (GObject * object);
static void gst_jack_audio_src_set_property (GObject * object, guint prop_id,
static void gst_jack_audio_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstCaps *gst_jack_audio_src_getcaps (GstBaseSrc * bsrc);
-static GstRingBuffer *gst_jack_audio_src_create_ringbuffer (GstBaseAudioSrc *
- src);
+static GstCaps *gst_jack_audio_src_getcaps (GstBaseSrc * bsrc,
+ GstCaps * filter);
+static GstAudioRingBuffer *gst_jack_audio_src_create_ringbuffer (GstAudioBaseSrc
+ * src);
/* GObject vmethod implementations */
-static void
-gst_jack_audio_src_base_init (gpointer gclass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_set_details_simple (element_class, "Audio Source (Jack)",
- "Source/Audio", "Captures audio from a JACK server",
- "Tristan Matthews <tristan@sat.qc.ca>");
-}
-
/* initialize the jack_audio_src's class */
static void
gst_jack_audio_src_class_init (GstJackAudioSrcClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseSrcClass *gstbasesrc_class;
- GstBaseAudioSrcClass *gstbaseaudiosrc_class;
+ GstAudioBaseSrcClass *gstaudiobasesrc_class;
- gobject_class = (GObjectClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_jack_audio_src_debug, "jacksrc", 0,
+ "jacksrc element");
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasesrc_class = (GstBaseSrcClass *) klass;
- gstbaseaudiosrc_class = (GstBaseAudioSrcClass *) klass;
+ gstaudiobasesrc_class = (GstAudioBaseSrcClass *) klass;
gobject_class->dispose = gst_jack_audio_src_dispose;
gobject_class->set_property = gst_jack_audio_src_set_property;
GST_TYPE_JACK_TRANSPORT, DEFAULT_PROP_TRANSPORT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "Audio Source (Jack)",
+ "Source/Audio", "Captures audio from a JACK server",
+ "Tristan Matthews <tristan@sat.qc.ca>");
+
gstbasesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_jack_audio_src_getcaps);
- gstbaseaudiosrc_class->create_ringbuffer =
+ gstaudiobasesrc_class->create_ringbuffer =
GST_DEBUG_FUNCPTR (gst_jack_audio_src_create_ringbuffer);
/* ref class from a thread-safe context to work around missing bit of
}
static void
-gst_jack_audio_src_init (GstJackAudioSrc * src, GstJackAudioSrcClass * gclass)
+gst_jack_audio_src_init (GstJackAudioSrc * src)
{
//gst_base_src_set_live(GST_BASE_SRC (src), TRUE);
src->connect = DEFAULT_PROP_CONNECT;
}
static GstCaps *
-gst_jack_audio_src_getcaps (GstBaseSrc * bsrc)
+gst_jack_audio_src_getcaps (GstBaseSrc * bsrc, GstCaps * filter)
{
GstJackAudioSrc *src = GST_JACK_AUDIO_SRC (bsrc);
const char **ports;
GST_DEBUG_OBJECT (src, "got %d-%d ports, samplerate: %d", min, max, rate);
if (!src->caps) {
- src->caps = gst_caps_new_simple ("audio/x-raw-float",
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "width", G_TYPE_INT, 32,
+ src->caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_JACK_FORMAT_STR,
+ "layout", G_TYPE_STRING, "interleaved",
"rate", G_TYPE_INT, rate,
"channels", GST_TYPE_INT_RANGE, min, max, NULL);
}
}
}
-static GstRingBuffer *
-gst_jack_audio_src_create_ringbuffer (GstBaseAudioSrc * src)
+static GstAudioRingBuffer *
+gst_jack_audio_src_create_ringbuffer (GstAudioBaseSrc * src)
{
- GstRingBuffer *buffer;
+ GstAudioRingBuffer *buffer;
buffer = g_object_new (GST_TYPE_JACK_RING_BUFFER, NULL);
GST_DEBUG_OBJECT (src, "created ringbuffer @%p", buffer);
struct _GstJackAudioSrc
{
- GstBaseAudioSrc src;
+ GstAudioBaseSrc src;
/*< private >*/
/* cached caps */
struct _GstJackAudioSrcClass
{
- GstBaseAudioSrcClass parent_class;
+ GstAudioBaseSrcClass parent_class;
};
GType gst_jack_audio_src_get_type (void);
struct _GstJackRingBuffer
{
- GstRingBuffer object;
+ GstAudioRingBuffer object;
gint sample_rate;
gint buffer_size;
struct _GstJackRingBufferClass
{
- GstRingBufferClass parent_class;
+ GstAudioRingBufferClass parent_class;
};
static void gst_jack_ring_buffer_class_init(GstJackRingBufferClass * klass);
static void gst_jack_ring_buffer_init(GstJackRingBuffer * ringbuffer,
GstJackRingBufferClass * klass);
-static GstRingBufferClass *ring_parent_class = NULL;
+static GstAudioRingBufferClass *ring_parent_class = NULL;
-static gboolean gst_jack_ring_buffer_open_device(GstRingBuffer * buf);
-static gboolean gst_jack_ring_buffer_close_device(GstRingBuffer * buf);
-static gboolean gst_jack_ring_buffer_acquire(GstRingBuffer * buf,GstRingBufferSpec * spec);
-static gboolean gst_jack_ring_buffer_release(GstRingBuffer * buf);
-static gboolean gst_jack_ring_buffer_start(GstRingBuffer * buf);
-static gboolean gst_jack_ring_buffer_pause(GstRingBuffer * buf);
-static gboolean gst_jack_ring_buffer_stop(GstRingBuffer * buf);
-static guint gst_jack_ring_buffer_delay(GstRingBuffer * buf);
+static gboolean gst_jack_ring_buffer_open_device(GstAudioRingBuffer * buf);
+static gboolean gst_jack_ring_buffer_close_device(GstAudioRingBuffer * buf);
+static gboolean gst_jack_ring_buffer_acquire(GstAudioRingBuffer * buf,GstAudioRingBufferSpec * spec);
+static gboolean gst_jack_ring_buffer_release(GstAudioRingBuffer * buf);
+static gboolean gst_jack_ring_buffer_start(GstAudioRingBuffer * buf);
+static gboolean gst_jack_ring_buffer_pause(GstAudioRingBuffer * buf);
+static gboolean gst_jack_ring_buffer_stop(GstAudioRingBuffer * buf);
+static guint gst_jack_ring_buffer_delay(GstAudioRingBuffer * buf);
#endif
*/
#include "gstjackutil.h"
-#include <gst/audio/multichannel.h>
+#include <gst/audio/audio.h>
static const GstAudioChannelPosition default_positions[8][8] = {
/* 1 channel */
{
- GST_AUDIO_CHANNEL_POSITION_FRONT_MONO,
+ GST_AUDIO_CHANNEL_POSITION_MONO,
},
/* 2 channels */
{
{
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
- GST_AUDIO_CHANNEL_POSITION_LFE, /* or FRONT_CENTER for 3.0? */
+ GST_AUDIO_CHANNEL_POSITION_LFE1, /* or FRONT_CENTER for 3.0? */
},
/* 4 channels (4.0 or 3.1?) */
{
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
},
/* 7 channels */
{
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_REAR_CENTER,
},
/* 8 channels */
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
}
/* if channels are less than or equal to 8, we set a default layout,
* otherwise set layout to an array of GST_AUDIO_CHANNEL_POSITION_NONE */
void
-gst_jack_set_layout_on_caps (GstCaps ** caps, gint channels)
+gst_jack_set_layout (GstAudioRingBuffer * buffer, GstAudioRingBufferSpec * spec)
{
- int c;
- GValue pos = { 0 };
- GValue chanpos = { 0 };
- gst_caps_unref (*caps);
+ gint i;
- if (channels <= 8) {
- g_assert (channels >= 1);
- gst_audio_set_channel_positions (gst_caps_get_structure (*caps, 0),
- default_positions[channels - 1]);
+ if (spec->info.channels <= 8) {
+ for (i = 0; i < spec->info.channels; i++)
+ spec->info.position[i] = default_positions[spec->info.channels - 1][i];
+ gst_audio_channel_positions_to_valid_order (spec->info.position,
+ spec->info.channels);
+ gst_audio_ring_buffer_set_channel_positions (buffer,
+ default_positions[spec->info.channels - 1]);
} else {
- g_value_init (&chanpos, GST_TYPE_ARRAY);
- g_value_init (&pos, GST_TYPE_AUDIO_CHANNEL_POSITION);
- for (c = 0; c < channels; c++) {
- g_value_set_enum (&pos, GST_AUDIO_CHANNEL_POSITION_NONE);
- gst_value_array_append_value (&chanpos, &pos);
- }
- g_value_unset (&pos);
- gst_structure_set_value (gst_caps_get_structure (*caps, 0),
- "channel-positions", &chanpos);
- g_value_unset (&chanpos);
+ spec->info.flags |= GST_AUDIO_FLAG_UNPOSITIONED;
+ for (i = 0; i < G_N_ELEMENTS (spec->info.position); i++)
+ spec->info.position[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
+ gst_audio_ring_buffer_set_channel_positions (buffer, spec->info.position);
}
- gst_caps_ref (*caps);
+
+ gst_caps_unref (spec->caps);
+ spec->caps = gst_audio_info_to_caps (&spec->info);
}
#define _GST_JACK_UTIL_H_
#include <gst/gst.h>
+#include <gst/audio/gstaudioringbuffer.h>
void
-gst_jack_set_layout_on_caps (GstCaps **caps, gint channels);
+gst_jack_set_layout (GstAudioRingBuffer * buffer, GstAudioRingBufferSpec *spec);
#endif // _GST_JACK_UTIL_H_
libgstjpeg_la_SOURCES = \
gstjpeg.c \
gstjpegenc.c \
- gstjpegdec.c \
- gstsmokeenc.c \
- smokecodec.c \
- gstsmokedec.c
+ gstjpegdec.c
+# deprected gstsmokeenc.c smokecodec.c gstsmokedec.c
libgstjpeg_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstjpeg_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
noinst_HEADERS = \
gstjpeg.h \
- gstjpegdec.h gstjpegenc.h \
- gstsmokeenc.h gstsmokedec.h \
- smokecodec.h smokeformat.h
+ gstjpegdec.h gstjpegenc.h
+# deprecated gstsmokeenc.h gstsmokedec.h smokecodec.h smokeformat.h
#include "gstjpeg.h"
#include "gstjpegdec.h"
#include "gstjpegenc.h"
+#if 0
#include "gstsmokeenc.h"
#include "gstsmokedec.h"
+#endif
GType
gst_idct_method_get_type (void)
GST_TYPE_JPEG_DEC))
return FALSE;
+#if 0
if (!gst_element_register (plugin, "smokeenc", GST_RANK_PRIMARY,
GST_TYPE_SMOKEENC))
return FALSE;
if (!gst_element_register (plugin, "smokedec", GST_RANK_PRIMARY,
GST_TYPE_SMOKEDEC))
return FALSE;
+#endif
return TRUE;
}
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420") "; "
- GST_VIDEO_CAPS_RGB "; " GST_VIDEO_CAPS_BGR "; "
- GST_VIDEO_CAPS_RGBx "; " GST_VIDEO_CAPS_xRGB "; "
- GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_xBGR "; "
- GST_VIDEO_CAPS_GRAY8)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
+ ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
);
+
/* *INDENT-ON* */
/* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
#define GST_CAT_DEFAULT jpeg_dec_debug
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
-/* These macros are adapted from videotestsrc.c
- * and/or gst-plugins/gst/games/gstvideoimage.c */
-#define I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
-#define I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
-#define I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(I420_Y_ROWSTRIDE(width)))/2)
-
-#define I420_Y_OFFSET(w,h) (0)
-#define I420_U_OFFSET(w,h) (I420_Y_OFFSET(w,h)+(I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
-#define I420_V_OFFSET(w,h) (I420_U_OFFSET(w,h)+(I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
-
-#define I420_SIZE(w,h) (I420_V_OFFSET(w,h)+(I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
-
-static GstElementClass *parent_class; /* NULL */
-
-static void gst_jpeg_dec_base_init (gpointer g_class);
-static void gst_jpeg_dec_class_init (GstJpegDecClass * klass);
-static void gst_jpeg_dec_init (GstJpegDec * jpegdec);
-
static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstFlowReturn gst_jpeg_dec_chain (GstPad * pad, GstBuffer * buffer);
-static gboolean gst_jpeg_dec_setcaps (GstPad * pad, GstCaps * caps);
-static GstCaps *gst_jpeg_dec_getcaps (GstPad * pad);
-static gboolean gst_jpeg_dec_sink_event (GstPad * pad, GstEvent * event);
-static gboolean gst_jpeg_dec_src_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_jpeg_dec_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static GstCaps *gst_jpeg_dec_getcaps (GstPad * pad, GstCaps * filter);
+static gboolean gst_jpeg_dec_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_jpeg_dec_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_jpeg_dec_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static GstStateChangeReturn gst_jpeg_dec_change_state (GstElement * element,
GstStateChange transition);
static void gst_jpeg_dec_update_qos (GstJpegDec * dec, gdouble proportion,
static void gst_jpeg_dec_read_qos (GstJpegDec * dec, gdouble * proportion,
GstClockTime * time);
-GType
-gst_jpeg_dec_get_type (void)
-{
- static GType type = 0;
-
- if (!type) {
- static const GTypeInfo jpeg_dec_info = {
- sizeof (GstJpegDecClass),
- (GBaseInitFunc) gst_jpeg_dec_base_init,
- NULL,
- (GClassInitFunc) gst_jpeg_dec_class_init,
- NULL,
- NULL,
- sizeof (GstJpegDec),
- 0,
- (GInstanceInitFunc) gst_jpeg_dec_init,
- };
-
- type = g_type_register_static (GST_TYPE_ELEMENT, "GstJpegDec",
- &jpeg_dec_info, 0);
- }
- return type;
-}
+#define gst_jpeg_dec_parent_class parent_class
+G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_ELEMENT);
static void
gst_jpeg_dec_finalize (GObject * object)
}
static void
-gst_jpeg_dec_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_jpeg_dec_src_pad_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_jpeg_dec_sink_pad_template);
- gst_element_class_set_details_simple (element_class, "JPEG image decoder",
- "Codec/Decoder/Image",
- "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
-}
-
-static void
gst_jpeg_dec_class_init (GstJpegDecClass * klass)
{
GstElementClass *gstelement_class;
-1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
+ gst_element_class_set_details_simple (gstelement_class, "JPEG image decoder",
+ "Codec/Decoder/Image",
+ "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
+
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_jpeg_dec_change_state);
gst_pad_new_from_static_template (&gst_jpeg_dec_sink_pad_template,
"sink");
gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
- gst_pad_set_setcaps_function (dec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_jpeg_dec_setcaps));
- gst_pad_set_getcaps_function (dec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_jpeg_dec_getcaps));
gst_pad_set_chain_function (dec->sinkpad,
GST_DEBUG_FUNCPTR (gst_jpeg_dec_chain));
gst_pad_set_event_function (dec->sinkpad,
GST_DEBUG_FUNCPTR (gst_jpeg_dec_sink_event));
+ gst_pad_set_query_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_jpeg_dec_sink_query));
dec->srcpad =
gst_pad_new_from_static_template (&gst_jpeg_dec_src_pad_template, "src");
}
static gboolean
-gst_jpeg_dec_setcaps (GstPad * pad, GstCaps * caps)
+gst_jpeg_dec_setcaps (GstJpegDec * dec, GstCaps * caps)
{
GstStructure *s;
- GstJpegDec *dec;
const GValue *framerate;
- dec = GST_JPEG_DEC (GST_OBJECT_PARENT (pad));
s = gst_caps_get_structure (caps, 0);
if ((framerate = gst_structure_get_value (s, "framerate")) != NULL) {
- dec->framerate_numerator = gst_value_get_fraction_numerator (framerate);
- dec->framerate_denominator = gst_value_get_fraction_denominator (framerate);
+ dec->in_fps_n = gst_value_get_fraction_numerator (framerate);
+ dec->in_fps_d = gst_value_get_fraction_denominator (framerate);
dec->packetized = TRUE;
GST_DEBUG ("got framerate of %d/%d fps => packetized mode",
- dec->framerate_numerator, dec->framerate_denominator);
+ dec->in_fps_n, dec->in_fps_d);
}
/* do not extract width/height here. we do that in the chain
}
static GstCaps *
-gst_jpeg_dec_getcaps (GstPad * pad)
+gst_jpeg_dec_getcaps (GstPad * pad, GstCaps * filter)
{
GstJpegDec *dec;
GstCaps *caps;
dec = GST_JPEG_DEC (GST_OBJECT_PARENT (pad));
- if (GST_PAD_CAPS (pad))
- return gst_caps_ref (GST_PAD_CAPS (pad));
+ if (gst_pad_has_current_caps (pad))
+ return gst_pad_get_current_caps (pad);
peer = gst_pad_get_peer (dec->srcpad);
GstStructure *s;
guint i, n;
- peer_caps = gst_pad_get_caps (peer);
+ peer_caps = gst_pad_query_caps (peer, filter);
/* Translate peercaps to image/jpeg */
peer_caps = gst_caps_make_writable (peer_caps);
{
gint i;
- if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
+ if (G_LIKELY (dec->idr_width_allocated >= maxrowbytes))
return TRUE;
/* FIXME: maybe just alloc one or three blocks altogether? */
}
static void
-gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, guchar * base[1],
- guint width, guint height, guint pstride, guint rstride)
+gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
{
guchar *rows[16];
guchar **scanarray[1] = { rows };
gint i, j, k;
gint lines;
+ guint8 *base[1];
+ gint width, height;
+ gint pstride, rstride;
GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return;
+ base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+
memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
i = 0;
}
static void
-gst_jpeg_dec_decode_rgb (GstJpegDec * dec, guchar * base[3],
- guint width, guint height, guint pstride, guint rstride)
+gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
{
guchar *r_rows[16], *g_rows[16], *b_rows[16];
guchar **scanarray[3] = { r_rows, g_rows, b_rows };
gint i, j, k;
gint lines;
+ guint8 *base[3];
+ guint pstride, rstride;
+ gint width, height;
GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return;
+ for (i = 0; i < 3; i++)
+ base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
+
+ pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+
memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
}
static void
-gst_jpeg_dec_decode_indirect (GstJpegDec * dec, guchar * base[3],
- guchar * last[3], guint width, guint height, gint r_v, gint r_h, gint comp)
+gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame,
+ gint r_v, gint r_h, gint comp)
{
guchar *y_rows[16], *u_rows[16], *v_rows[16];
guchar **scanarray[3] = { y_rows, u_rows, v_rows };
gint i, j, k;
gint lines;
+ guchar *base[3], *last[3];
+ gint stride[3];
+ gint width, height;
GST_DEBUG_OBJECT (dec,
"unadvantageous width or r_h, taking slow route involving memcpy");
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return;
+ for (i = 0; i < 3; i++) {
+ base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
+ stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ /* make sure we don't make jpeglib write beyond our buffer,
+ * which might happen if (height % (r_v*DCTSIZE)) != 0 */
+ last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
+ (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
+ }
+
memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
if (G_LIKELY (lines > 0)) {
for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
if (G_LIKELY (base[0] <= last[0])) {
- memcpy (base[0], y_rows[j], I420_Y_ROWSTRIDE (width));
- base[0] += I420_Y_ROWSTRIDE (width);
+ memcpy (base[0], y_rows[j], stride[0]);
+ base[0] += stride[0];
}
if (r_v == 2) {
if (G_LIKELY (base[0] <= last[0])) {
- memcpy (base[0], y_rows[j + 1], I420_Y_ROWSTRIDE (width));
- base[0] += I420_Y_ROWSTRIDE (width);
+ memcpy (base[0], y_rows[j + 1], stride[0]);
+ base[0] += stride[0];
}
}
if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
if (r_h == 2) {
- memcpy (base[1], u_rows[k], I420_U_ROWSTRIDE (width));
- memcpy (base[2], v_rows[k], I420_V_ROWSTRIDE (width));
+ memcpy (base[1], u_rows[k], stride[1]);
+ memcpy (base[2], v_rows[k], stride[2]);
} else if (r_h == 1) {
- hresamplecpy1 (base[1], u_rows[k], I420_U_ROWSTRIDE (width));
- hresamplecpy1 (base[2], v_rows[k], I420_V_ROWSTRIDE (width));
+ hresamplecpy1 (base[1], u_rows[k], stride[1]);
+ hresamplecpy1 (base[2], v_rows[k], stride[2]);
} else {
/* FIXME: implement (at least we avoid crashing by doing nothing) */
}
}
if (r_v == 2 || (k & 1) != 0) {
- base[1] += I420_U_ROWSTRIDE (width);
- base[2] += I420_V_ROWSTRIDE (width);
+ base[1] += stride[1];
+ base[2] += stride[2];
}
}
} else {
}
}
-#ifndef GST_DISABLE_GST_DEBUG
-static inline void
-dump_lines (guchar * base[3], guchar ** line[3], int v_samp0, int width)
-{
- int j;
-
- for (j = 0; j < (v_samp0 * DCTSIZE); ++j) {
- GST_LOG ("[%02d] %5d %5d %5d", j,
- (line[0][j] >= base[0]) ?
- (int) (line[0][j] - base[0]) / I420_Y_ROWSTRIDE (width) : -1,
- (line[1][j] >= base[1]) ?
- (int) (line[1][j] - base[1]) / I420_U_ROWSTRIDE (width) : -1,
- (line[2][j] >= base[2]) ?
- (int) (line[2][j] - base[2]) / I420_V_ROWSTRIDE (width) : -1);
- }
-}
-#endif
-
static GstFlowReturn
-gst_jpeg_dec_decode_direct (GstJpegDec * dec, guchar * base[3],
- guchar * last[3], guint width, guint height)
+gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
{
guchar **line[3]; /* the jpeg line buffer */
guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
guchar *v[4 * DCTSIZE] = { NULL, };
gint i, j;
gint lines, v_samp[3];
+ guchar *base[3], *last[3];
+ gint stride[3];
+ guint height;
line[0] = y;
line[1] = u;
if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
goto format_not_supported;
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ for (i = 0; i < 3; i++) {
+ base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
+ stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ /* make sure we don't make jpeglib write beyond our buffer,
+ * which might happen if (height % (r_v*DCTSIZE)) != 0 */
+ last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
+ (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
+ }
+
/* let jpeglib decode directly into our final buffer */
GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
/* Y */
- line[0][j] = base[0] + (i + j) * I420_Y_ROWSTRIDE (width);
+ line[0][j] = base[0] + (i + j) * stride[0];
if (G_UNLIKELY (line[0][j] > last[0]))
line[0][j] = last[0];
/* U */
if (v_samp[1] == v_samp[0]) {
- line[1][j] = base[1] + ((i + j) / 2) * I420_U_ROWSTRIDE (width);
+ line[1][j] = base[1] + ((i + j) / 2) * stride[1];
} else if (j < (v_samp[1] * DCTSIZE)) {
- line[1][j] = base[1] + ((i / 2) + j) * I420_U_ROWSTRIDE (width);
+ line[1][j] = base[1] + ((i / 2) + j) * stride[1];
}
if (G_UNLIKELY (line[1][j] > last[1]))
line[1][j] = last[1];
/* V */
if (v_samp[2] == v_samp[0]) {
- line[2][j] = base[2] + ((i + j) / 2) * I420_V_ROWSTRIDE (width);
+ line[2][j] = base[2] + ((i + j) / 2) * stride[2];
} else if (j < (v_samp[2] * DCTSIZE)) {
- line[2][j] = base[2] + ((i / 2) + j) * I420_V_ROWSTRIDE (width);
+ line[2][j] = base[2] + ((i / 2) + j) * stride[2];
}
if (G_UNLIKELY (line[2][j] > last[2]))
line[2][j] = last[2];
}
- /* dump_lines (base, line, v_samp[0], width); */
-
lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
if (G_UNLIKELY (!lines)) {
GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
GST_OBJECT_LOCK (dec);
dec->proportion = proportion;
if (G_LIKELY (ts != GST_CLOCK_TIME_NONE)) {
- if (G_UNLIKELY (diff > 0))
+ if (G_UNLIKELY (diff > dec->qos_duration))
dec->earliest_time = ts + 2 * diff + dec->qos_duration;
else
dec->earliest_time = ts + diff;
return TRUE;
}
-static void
+static gboolean
+gst_jpeg_dec_buffer_pool (GstJpegDec * dec, GstCaps * caps)
+{
+ GstQuery *query;
+ GstBufferPool *pool = NULL;
+ guint size, min, max, prefix, alignment;
+ GstStructure *config;
+
+ GST_DEBUG_OBJECT (dec, "setting up bufferpool");
+
+ /* find a pool for the negotiated caps now */
+ query = gst_query_new_allocation (caps, TRUE);
+
+ if (gst_pad_peer_query (dec->srcpad, query)) {
+ /* we got configuration from our peer, parse them */
+ gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
+ &alignment, &pool);
+ size = MAX (size, dec->info.size);
+ } else {
+ GST_DEBUG_OBJECT (dec, "peer query failed, using defaults");
+ size = dec->info.size;
+ min = max = 0;
+ prefix = 0;
+ alignment = 15;
+ }
+ gst_query_unref (query);
+
+ if (pool == NULL) {
+ /* we did not get a pool, make one ourselves then */
+ pool = gst_buffer_pool_new ();
+ }
+
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set (config, caps, size, min, max, prefix,
+ alignment | 15);
+ /* and store */
+ gst_buffer_pool_set_config (pool, config);
+
+ if (dec->pool) {
+ gst_buffer_pool_set_active (dec->pool, FALSE);
+ gst_object_unref (dec->pool);
+ }
+ dec->pool = pool;
+
+ /* and activate */
+ gst_buffer_pool_set_active (pool, TRUE);
+
+ return TRUE;
+}
+
+static gboolean
gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
{
GstCaps *caps;
GstVideoFormat format;
+ GstVideoInfo info;
- if (G_UNLIKELY (width == dec->caps_width && height == dec->caps_height &&
- dec->framerate_numerator == dec->caps_framerate_numerator &&
- dec->framerate_denominator == dec->caps_framerate_denominator &&
- clrspc == dec->clrspc))
- return;
+ if (G_UNLIKELY (width == dec->info.width && height == dec->info.height &&
+ dec->in_fps_n == dec->info.fps_n && dec->in_fps_d == dec->info.fps_d
+ && clrspc == dec->clrspc))
+ return TRUE;
+
+ gst_video_info_init (&info);
/* framerate == 0/1 is a still frame */
- if (dec->framerate_denominator == 0) {
- dec->framerate_numerator = 0;
- dec->framerate_denominator = 1;
+ if (dec->in_fps_d == 0) {
+ info.fps_n = 0;
+ info.fps_d = 1;
+ } else {
+ info.fps_n = dec->in_fps_n;
+ info.fps_d = dec->in_fps_d;
}
/* calculate or assume an average frame duration for QoS purposes */
GST_OBJECT_LOCK (dec);
- if (dec->framerate_numerator != 0) {
- dec->qos_duration = gst_util_uint64_scale (GST_SECOND,
- dec->framerate_denominator, dec->framerate_numerator);
+ if (info.fps_n != 0) {
+ dec->qos_duration =
+ gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
+ dec->duration = dec->qos_duration;
} else {
/* if not set just use 25fps */
dec->qos_duration = gst_util_uint64_scale (GST_SECOND, 1, 25);
+ dec->duration = GST_CLOCK_TIME_NONE;
}
GST_OBJECT_UNLOCK (dec);
if (dec->cinfo.jpeg_color_space == JCS_RGB) {
gint i;
GstCaps *allowed_caps;
+ GstVideoInfo tmpinfo;
GST_DEBUG_OBJECT (dec, "selecting RGB format");
/* retrieve allowed caps, and find the first one that reasonably maps
* and get_pad_template_caps doesn't */
caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
}
- /* avoid lists of fourcc, etc */
+ /* avoid lists of formats, etc */
allowed_caps = gst_caps_normalize (caps);
gst_caps_unref (caps);
caps = NULL;
gst_caps_unref (caps);
caps = gst_caps_copy_nth (allowed_caps, i);
/* sigh, ds and _parse_caps need fixed caps for parsing, fixate */
- gst_pad_fixate_caps (dec->srcpad, caps);
+ gst_caps_fixate (caps);
GST_LOG_OBJECT (dec, "checking caps %" GST_PTR_FORMAT, caps);
- if (!gst_video_format_parse_caps (caps, &format, NULL, NULL))
+
+ if (!gst_video_info_from_caps (&tmpinfo, caps))
continue;
/* we'll settle for the first (preferred) downstream rgb format */
- if (gst_video_format_is_rgb (format))
+ if (GST_VIDEO_INFO_IS_RGB (&tmpinfo))
break;
/* default fall-back */
format = GST_VIDEO_FORMAT_RGB;
if (caps)
gst_caps_unref (caps);
gst_caps_unref (allowed_caps);
- caps = gst_video_format_new_caps (format, width, height,
- dec->framerate_numerator, dec->framerate_denominator, 1, 1);
- dec->outsize = gst_video_format_get_size (format, width, height);
- /* some format info */
- dec->offset[0] =
- gst_video_format_get_component_offset (format, 0, width, height);
- dec->offset[1] =
- gst_video_format_get_component_offset (format, 1, width, height);
- dec->offset[2] =
- gst_video_format_get_component_offset (format, 2, width, height);
- /* equal for all components */
- dec->stride = gst_video_format_get_row_stride (format, 0, width);
- dec->inc = gst_video_format_get_pixel_stride (format, 0);
} else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
/* TODO is anything else then 8bit supported in jpeg? */
format = GST_VIDEO_FORMAT_GRAY8;
- caps = gst_video_format_new_caps (format, width, height,
- dec->framerate_numerator, dec->framerate_denominator, 1, 1);
- dec->outsize = gst_video_format_get_size (format, width, height);
- dec->offset[0] =
- gst_video_format_get_component_offset (format, 0, width, height);
- dec->stride = gst_video_format_get_row_stride (format, 0, width);
- dec->inc = gst_video_format_get_pixel_stride (format, 0);
} else {
/* go for plain and simple I420 */
/* TODO other YUV cases ? */
- caps = gst_caps_new_simple ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
- "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
- "framerate", GST_TYPE_FRACTION, dec->framerate_numerator,
- dec->framerate_denominator, NULL);
- dec->outsize = I420_SIZE (width, height);
+ format = GST_VIDEO_FORMAT_I420;
}
+ gst_video_info_set_format (&info, format, width, height);
+ caps = gst_video_info_to_caps (&info);
+
GST_DEBUG_OBJECT (dec, "setting caps %" GST_PTR_FORMAT, caps);
GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
gst_pad_set_caps (dec->srcpad, caps);
+
+ dec->info = info;
+ dec->clrspc = clrspc;
+
+ gst_jpeg_dec_buffer_pool (dec, caps);
gst_caps_unref (caps);
- dec->caps_width = width;
- dec->caps_height = height;
- dec->caps_framerate_numerator = dec->framerate_numerator;
- dec->caps_framerate_denominator = dec->framerate_denominator;
+ return TRUE;
}
static GstFlowReturn
-gst_jpeg_dec_chain (GstPad * pad, GstBuffer * buf)
+gst_jpeg_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstFlowReturn ret = GST_FLOW_OK;
GstJpegDec *dec;
GstBuffer *outbuf = NULL;
-#ifndef GST_DISABLE_GST_DEBUG
- guchar *data;
-#endif
- guchar *outdata;
- guchar *base[3], *last[3];
gint img_len;
- guint outsize;
gint width, height;
gint r_h, r_v;
guint code, hdr_ok;
GstClockTime timestamp, duration;
+ GstVideoFrame frame;
- dec = GST_JPEG_DEC (GST_PAD_PARENT (pad));
+ dec = GST_JPEG_DEC (parent);
timestamp = GST_BUFFER_TIMESTAMP (buf);
duration = GST_BUFFER_DURATION (buf);
(dec->segment.stop != -1) &&
(gst_adapter_available (dec->adapter) < dec->segment.stop)) {
/* We assume that non-packetized input in bytes is *one* single jpeg image */
- GST_DEBUG ("Non-packetized mode. Got %d bytes, need %" G_GINT64_FORMAT,
- gst_adapter_available (dec->adapter), dec->segment.stop);
+ GST_DEBUG ("Non-packetized mode. Got %" G_GSIZE_FORMAT " bytes, "
+ "need %" G_GINT64_FORMAT, gst_adapter_available (dec->adapter),
+ dec->segment.stop);
goto need_more_data;
}
goto skip_decoding;
#ifndef GST_DISABLE_GST_DEBUG
- data = (guint8 *) gst_adapter_peek (dec->adapter, 4);
- GST_LOG_OBJECT (dec, "reading header %02x %02x %02x %02x", data[0], data[1],
- data[2], data[3]);
+ {
+ guchar data[4];
+
+ gst_adapter_copy (dec->adapter, data, 0, 4);
+ GST_LOG_OBJECT (dec, "reading header %02x %02x %02x %02x", data[0], data[1],
+ data[2], data[3]);
+ }
#endif
gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
- ret = gst_pad_alloc_buffer_and_set_caps (dec->srcpad, GST_BUFFER_OFFSET_NONE,
- dec->outsize, GST_PAD_CAPS (dec->srcpad), &outbuf);
+ ret = gst_buffer_pool_acquire_buffer (dec->pool, &outbuf, NULL);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto alloc_failed;
- outdata = GST_BUFFER_DATA (outbuf);
- outsize = GST_BUFFER_SIZE (outbuf);
+ if (!gst_video_frame_map (&frame, &dec->info, outbuf, GST_MAP_READWRITE))
+ goto invalid_frame;
- GST_LOG_OBJECT (dec, "width %d, height %d, buffer size %d, required size %d",
- width, height, outsize, dec->outsize);
+ GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
GST_BUFFER_TIMESTAMP (outbuf) = dec->next_ts;
if (GST_CLOCK_TIME_IS_VALID (duration)) {
/* use duration from incoming buffer for outgoing buffer */
dec->next_ts += duration;
- } else if (dec->framerate_numerator != 0) {
- duration = gst_util_uint64_scale (GST_SECOND,
- dec->framerate_denominator, dec->framerate_numerator);
- dec->next_ts += duration;
+ } else if (GST_CLOCK_TIME_IS_VALID (dec->duration)) {
+ duration = dec->duration;
+ dec->next_ts += dec->duration;
} else {
duration = GST_CLOCK_TIME_NONE;
dec->next_ts = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (outbuf) = duration;
if (dec->cinfo.jpeg_color_space == JCS_RGB) {
- base[0] = outdata + dec->offset[0];
- base[1] = outdata + dec->offset[1];
- base[2] = outdata + dec->offset[2];
- gst_jpeg_dec_decode_rgb (dec, base, width, height, dec->inc, dec->stride);
+ gst_jpeg_dec_decode_rgb (dec, &frame);
} else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
- base[0] = outdata + dec->offset[0];
- gst_jpeg_dec_decode_grayscale (dec, base, width, height, dec->inc,
- dec->stride);
+ gst_jpeg_dec_decode_grayscale (dec, &frame);
} else {
- /* mind the swap, jpeglib outputs blue chroma first
- * ensonic: I see no swap?
- */
- base[0] = outdata + I420_Y_OFFSET (width, height);
- base[1] = outdata + I420_U_OFFSET (width, height);
- base[2] = outdata + I420_V_OFFSET (width, height);
-
- /* make sure we don't make jpeglib write beyond our buffer,
- * which might happen if (height % (r_v*DCTSIZE)) != 0 */
- last[0] = base[0] + (I420_Y_ROWSTRIDE (width) * (height - 1));
- last[1] =
- base[1] + (I420_U_ROWSTRIDE (width) * ((GST_ROUND_UP_2 (height) / 2) -
- 1));
- last[2] =
- base[2] + (I420_V_ROWSTRIDE (width) * ((GST_ROUND_UP_2 (height) / 2) -
- 1));
-
GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
dec->cinfo.rec_outbuf_height);
|| dec->cinfo.comp_info[2].h_samp_factor != 1)) {
GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
"indirect decoding using extra buffer copy");
- gst_jpeg_dec_decode_indirect (dec, base, last, width, height, r_v, r_h,
+ gst_jpeg_dec_decode_indirect (dec, &frame, r_v, r_h,
dec->cinfo.num_components);
} else {
- ret = gst_jpeg_dec_decode_direct (dec, base, last, width, height);
-
+ ret = gst_jpeg_dec_decode_direct (dec, &frame);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto decode_direct_failed;
}
GST_LOG_OBJECT (dec, "decompressing finished");
jpeg_finish_decompress (&dec->cinfo);
+ gst_video_frame_unmap (&frame);
+
/* Clipping */
if (dec->segment.format == GST_FORMAT_TIME) {
- gint64 start, stop, clip_start, clip_stop;
+ guint64 start, stop, clip_start, clip_stop;
GST_LOG_OBJECT (dec, "Attempting clipping");
GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
/* Reset for next time */
jpeg_abort_decompress (&dec->cinfo);
- if (ret != GST_FLOW_UNEXPECTED && ret != GST_FLOW_WRONG_STATE &&
+ if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
ret != GST_FLOW_NOT_LINKED) {
gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
"Buffer allocation failed, reason: %s", reason);
}
goto exit;
}
+invalid_frame:
+ {
+ jpeg_abort_decompress (&dec->cinfo);
+ gst_buffer_unref (outbuf);
+ ret = GST_FLOW_OK;
+ goto exit;
+ }
drop_buffer:
{
GST_WARNING_OBJECT (dec, "Outgoing buffer is outside configured segment");
}
static gboolean
-gst_jpeg_dec_src_event (GstPad * pad, GstEvent * event)
+gst_jpeg_dec_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstJpegDec *dec;
gboolean res;
- dec = GST_JPEG_DEC (gst_pad_get_parent (pad));
- if (G_UNLIKELY (dec == NULL)) {
- gst_event_unref (event);
- return FALSE;
- }
+ dec = GST_JPEG_DEC (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_QOS:{
+ GstQOSType type;
GstClockTimeDiff diff;
GstClockTime timestamp;
gdouble proportion;
- gst_event_parse_qos (event, &proportion, &diff, ×tamp);
+ gst_event_parse_qos (event, &type, &proportion, &diff, ×tamp);
gst_jpeg_dec_update_qos (dec, proportion, diff, timestamp);
break;
}
res = gst_pad_push_event (dec->sinkpad, event);
- gst_object_unref (dec);
return res;
}
static gboolean
-gst_jpeg_dec_sink_event (GstPad * pad, GstEvent * event)
+gst_jpeg_dec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
- gboolean ret = TRUE;
- GstJpegDec *dec = GST_JPEG_DEC (GST_OBJECT_PARENT (pad));
+ gboolean ret = TRUE, forward = TRUE;
+ GstJpegDec *dec = GST_JPEG_DEC (parent);
GST_DEBUG_OBJECT (dec, "event : %s", GST_EVENT_TYPE_NAME (event));
dec->parse_resync = FALSE;
gst_jpeg_dec_reset_qos (dec);
break;
- case GST_EVENT_NEWSEGMENT:{
- gboolean update;
- gdouble rate, applied_rate;
- GstFormat format;
- gint64 start, stop, position;
-
- gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
- &format, &start, &stop, &position);
-
- GST_DEBUG_OBJECT (dec, "Got NEWSEGMENT [%" GST_TIME_FORMAT
- " - %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "]",
- GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
- GST_TIME_ARGS (position));
-
- gst_segment_set_newsegment_full (&dec->segment, update, rate,
- applied_rate, format, start, stop, position);
+ case GST_EVENT_SEGMENT:
+ gst_event_copy_segment (event, &dec->segment);
+ GST_DEBUG_OBJECT (dec, "Got NEWSEGMENT %" GST_SEGMENT_FORMAT,
+ &dec->segment);
+ break;
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+ gst_event_parse_caps (event, &caps);
+ ret = gst_jpeg_dec_setcaps (dec, caps);
+ forward = FALSE;
break;
}
default:
break;
}
- ret = gst_pad_push_event (dec->srcpad, event);
+ if (forward)
+ ret = gst_pad_push_event (dec->srcpad, event);
+ else
+ gst_event_unref (event);
return ret;
}
+static gboolean
+gst_jpeg_dec_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_jpeg_dec_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
+}
+
static void
gst_jpeg_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
case GST_STATE_CHANGE_READY_TO_PAUSED:
dec->error_count = 0;
dec->good_count = 0;
- dec->framerate_numerator = 0;
- dec->framerate_denominator = 1;
- dec->caps_framerate_numerator = dec->caps_framerate_denominator = 0;
- dec->caps_width = -1;
- dec->caps_height = -1;
+ dec->in_fps_n = 0;
+ dec->in_fps_d = 1;
+ gst_video_info_init (&dec->info);
dec->clrspc = -1;
dec->packetized = FALSE;
dec->next_ts = 0;
g_free (dec->cur_buf);
dec->cur_buf = NULL;
gst_jpeg_dec_free_buffers (dec);
+ if (dec->pool) {
+ gst_buffer_pool_set_active (dec->pool, FALSE);
+ gst_object_unref (dec->pool);
+ }
+ dec->pool = NULL;
break;
default:
break;
GstClockTime earliest_time;
GstClockTime qos_duration;
- /* video state */
- gint framerate_numerator;
- gint framerate_denominator;
-
- /* negotiated state */
- gint caps_framerate_numerator;
- gint caps_framerate_denominator;
- gint caps_width;
- gint caps_height;
- gint outsize;
- gint clrspc;
+ /* input state */
+ gint in_fps_n;
+ gint in_fps_d;
+
+ /* negotiated output state */
+ GstBufferPool *pool;
+ GstVideoInfo info;
+ GstClockTime duration;
- gint offset[3];
- gint stride;
- gint inc;
+ gint clrspc;
/* parse state */
gint parse_offset;
/* JpegEnc signals and args */
enum
{
- FRAME_ENCODED,
/* FILL ME */
LAST_SIGNAL
};
};
static void gst_jpegenc_reset (GstJpegEnc * enc);
-static void gst_jpegenc_base_init (gpointer g_class);
-static void gst_jpegenc_class_init (GstJpegEnc * klass);
-static void gst_jpegenc_init (GstJpegEnc * jpegenc);
static void gst_jpegenc_finalize (GObject * object);
-static GstFlowReturn gst_jpegenc_chain (GstPad * pad, GstBuffer * buf);
-static gboolean gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps);
-static GstCaps *gst_jpegenc_getcaps (GstPad * pad);
+static GstFlowReturn gst_jpegenc_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_jpegenc_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstCaps *gst_jpegenc_getcaps (GstPad * pad, GstCaps * filter);
+static gboolean gst_jpegenc_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
static void gst_jpegenc_resync (GstJpegEnc * jpegenc);
static void gst_jpegenc_set_property (GObject * object, guint prop_id,
static GstStateChangeReturn gst_jpegenc_change_state (GstElement * element,
GstStateChange transition);
-
-static GstElementClass *parent_class = NULL;
-static guint gst_jpegenc_signals[LAST_SIGNAL] = { 0 };
-
-GType
-gst_jpegenc_get_type (void)
-{
- static GType jpegenc_type = 0;
-
- if (!jpegenc_type) {
- static const GTypeInfo jpegenc_info = {
- sizeof (GstJpegEnc),
- (GBaseInitFunc) gst_jpegenc_base_init,
- NULL,
- (GClassInitFunc) gst_jpegenc_class_init,
- NULL,
- NULL,
- sizeof (GstJpegEnc),
- 0,
- (GInstanceInitFunc) gst_jpegenc_init,
- };
-
- jpegenc_type =
- g_type_register_static (GST_TYPE_ELEMENT, "GstJpegEnc", &jpegenc_info,
- 0);
- }
- return jpegenc_type;
-}
+#define gst_jpegenc_parent_class parent_class
+G_DEFINE_TYPE (GstJpegEnc, gst_jpegenc, GST_TYPE_ELEMENT);
/* *INDENT-OFF* */
static GstStaticPadTemplate gst_jpegenc_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV
- ("{ I420, YV12, YUY2, UYVY, Y41B, Y42B, YVYU, Y444 }") "; "
- GST_VIDEO_CAPS_RGB "; " GST_VIDEO_CAPS_BGR "; "
- GST_VIDEO_CAPS_RGBx "; " GST_VIDEO_CAPS_xRGB "; "
- GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_xBGR "; "
- GST_VIDEO_CAPS_GRAY8)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
+ ("{ I420, YV12, YUY2, UYVY, Y41B, Y42B, YVYU, Y444, "
+ "RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
);
/* *INDENT-ON* */
"height = (int) [ 16, 65535 ], " "framerate = (fraction) [ 0/1, MAX ]")
);
-static void
-gst_jpegenc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_jpegenc_sink_pad_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_jpegenc_src_pad_template);
- gst_element_class_set_details_simple (element_class, "JPEG image encoder",
- "Codec/Encoder/Image",
- "Encode images in JPEG format", "Wim Taymans <wim.taymans@tvd.be>");
-}
static void
-gst_jpegenc_class_init (GstJpegEnc * klass)
+gst_jpegenc_class_init (GstJpegEncClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
-
- gst_jpegenc_signals[FRAME_ENCODED] =
- g_signal_new ("frame-encoded", G_TYPE_FROM_CLASS (klass),
- G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstJpegEncClass, frame_encoded), NULL,
- NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
-
+ gobject_class->finalize = gst_jpegenc_finalize;
gobject_class->set_property = gst_jpegenc_set_property;
gobject_class->get_property = gst_jpegenc_get_property;
-
g_object_class_install_property (gobject_class, PROP_QUALITY,
g_param_spec_int ("quality", "Quality", "Quality of encoding",
0, 100, JPEG_DEFAULT_QUALITY,
gstelement_class->change_state = gst_jpegenc_change_state;
- gobject_class->finalize = gst_jpegenc_finalize;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_jpegenc_sink_pad_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_jpegenc_src_pad_template));
+ gst_element_class_set_details_simple (gstelement_class, "JPEG image encoder",
+ "Codec/Encoder/Image",
+ "Encode images in JPEG format", "Wim Taymans <wim.taymans@tvd.be>");
GST_DEBUG_CATEGORY_INIT (jpegenc_debug, "jpegenc", 0,
"JPEG encoding element");
}
static void
+ensure_memory (GstJpegEnc * jpegenc)
+{
+ GstMemory *new_memory;
+ GstMapInfo map;
+ gsize old_size, desired_size, new_size;
+ guint8 *new_data;
+
+ old_size = jpegenc->output_map.size;
+ if (old_size == 0)
+ desired_size = jpegenc->bufsize;
+ else
+ desired_size = old_size * 2;
+
+ /* Our output memory wasn't big enough.
+ * Make a new memory that's twice the size, */
+ new_memory = gst_allocator_alloc (NULL, desired_size, 3);
+ gst_memory_map (new_memory, &map, GST_MAP_READWRITE);
+ new_data = map.data;
+ new_size = map.size;
+
+ /* copy previous data if any */
+ if (jpegenc->output_mem) {
+ memcpy (new_data, jpegenc->output_map.data, old_size);
+ gst_memory_unmap (jpegenc->output_mem, &jpegenc->output_map);
+ gst_memory_unref (jpegenc->output_mem);
+ }
+
+ /* drop it into place, */
+ jpegenc->output_mem = new_memory;
+ jpegenc->output_map = map;
+
+ /* and last, update libjpeg on where to work. */
+ jpegenc->jdest.next_output_byte = new_data + old_size;
+ jpegenc->jdest.free_in_buffer = new_size - old_size;
+}
+
+static void
gst_jpegenc_init_destination (j_compress_ptr cinfo)
{
GST_DEBUG ("gst_jpegenc_chain: init_destination");
static boolean
gst_jpegenc_flush_destination (j_compress_ptr cinfo)
{
- GstBuffer *overflow_buffer;
- guint32 old_buffer_size;
GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data);
- GstFlowReturn ret;
GST_DEBUG_OBJECT (jpegenc,
"gst_jpegenc_chain: flush_destination: buffer too small");
- /* Our output buffer wasn't big enough.
- * Make a new buffer that's twice the size, */
- old_buffer_size = GST_BUFFER_SIZE (jpegenc->output_buffer);
- ret = gst_pad_alloc_buffer_and_set_caps (jpegenc->srcpad,
- GST_BUFFER_OFFSET_NONE, old_buffer_size * 2,
- GST_PAD_CAPS (jpegenc->srcpad), &overflow_buffer);
- /* handle here if needed */
- if (ret != GST_FLOW_OK) {
- overflow_buffer = gst_buffer_new_and_alloc (old_buffer_size * 2);
- gst_buffer_set_caps (overflow_buffer, GST_PAD_CAPS (jpegenc->srcpad));
- }
-
- memcpy (GST_BUFFER_DATA (overflow_buffer),
- GST_BUFFER_DATA (jpegenc->output_buffer), old_buffer_size);
-
- gst_buffer_copy_metadata (overflow_buffer, jpegenc->output_buffer,
- GST_BUFFER_COPY_TIMESTAMPS);
-
- /* drop it into place, */
- gst_buffer_unref (jpegenc->output_buffer);
- jpegenc->output_buffer = overflow_buffer;
-
- /* and last, update libjpeg on where to work. */
- jpegenc->jdest.next_output_byte =
- GST_BUFFER_DATA (jpegenc->output_buffer) + old_buffer_size;
- jpegenc->jdest.free_in_buffer =
- GST_BUFFER_SIZE (jpegenc->output_buffer) - old_buffer_size;
+ ensure_memory (jpegenc);
return TRUE;
}
GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data);
GST_DEBUG_OBJECT (jpegenc, "gst_jpegenc_chain: term_source");
- /* Trim the buffer size and push it. */
- GST_BUFFER_SIZE (jpegenc->output_buffer) =
- GST_BUFFER_SIZE (jpegenc->output_buffer) - jpegenc->jdest.free_in_buffer;
-
- g_signal_emit (G_OBJECT (jpegenc), gst_jpegenc_signals[FRAME_ENCODED], 0);
-
- jpegenc->last_ret = gst_pad_push (jpegenc->srcpad, jpegenc->output_buffer);
- jpegenc->output_buffer = NULL;
+ gst_memory_unmap (jpegenc->output_mem, &jpegenc->output_map);
+ /* Trim the buffer size. we will push it in the chain function */
+ gst_memory_resize (jpegenc->output_mem, 0,
+ jpegenc->output_map.size - jpegenc->jdest.free_in_buffer);
+ jpegenc->output_map.data = NULL;
+ jpegenc->output_map.size = 0;
}
static void
gst_pad_new_from_static_template (&gst_jpegenc_sink_pad_template, "sink");
gst_pad_set_chain_function (jpegenc->sinkpad,
GST_DEBUG_FUNCPTR (gst_jpegenc_chain));
- gst_pad_set_getcaps_function (jpegenc->sinkpad,
- GST_DEBUG_FUNCPTR (gst_jpegenc_getcaps));
- gst_pad_set_setcaps_function (jpegenc->sinkpad,
- GST_DEBUG_FUNCPTR (gst_jpegenc_setcaps));
+ gst_pad_set_query_function (jpegenc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_jpegenc_sink_query));
+ gst_pad_set_event_function (jpegenc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_jpegenc_sink_event));
gst_element_add_pad (GST_ELEMENT (jpegenc), jpegenc->sinkpad);
jpegenc->srcpad =
gst_element_add_pad (GST_ELEMENT (jpegenc), jpegenc->srcpad);
/* reset the initial video state */
- jpegenc->width = -1;
- jpegenc->height = -1;
+ gst_video_info_init (&jpegenc->info);
/* setup jpeglib */
memset (&jpegenc->cinfo, 0, sizeof (jpegenc->cinfo));
}
}
- enc->width = -1;
- enc->height = -1;
- enc->format = GST_VIDEO_FORMAT_UNKNOWN;
- enc->fps_den = enc->par_den = 0;
- enc->height = enc->width = 0;
+ gst_video_info_init (&enc->info);
}
static void
}
static GstCaps *
-gst_jpegenc_getcaps (GstPad * pad)
+gst_jpegenc_getcaps (GstPad * pad, GstCaps * filter)
{
GstJpegEnc *jpegenc = GST_JPEGENC (gst_pad_get_parent (pad));
GstCaps *caps, *othercaps;
/* we want to proxy properties like width, height and framerate from the
other end of the element */
- othercaps = gst_pad_peer_get_caps_reffed (jpegenc->srcpad);
+ othercaps = gst_pad_peer_query_caps (jpegenc->srcpad, filter);
if (othercaps == NULL ||
gst_caps_is_empty (othercaps) || gst_caps_is_any (othercaps)) {
caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
}
static gboolean
-gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps)
+gst_jpegenc_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
- GstJpegEnc *enc = GST_JPEGENC (gst_pad_get_parent (pad));
- GstVideoFormat format;
- gint width, height;
- gint fps_num, fps_den;
- gint par_num, par_den;
+ gboolean res;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_jpegenc_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
+}
+
+static gboolean
+gst_jpegenc_setcaps (GstJpegEnc * enc, GstCaps * caps)
+{
+ GstVideoInfo info;
gint i;
GstCaps *othercaps;
gboolean ret;
+ const GstVideoFormatInfo *vinfo;
/* get info from caps */
- if (!gst_video_format_parse_caps (caps, &format, &width, &height))
+ if (!gst_video_info_from_caps (&info, caps))
goto refuse_caps;
- /* optional; pass along if present */
- fps_num = fps_den = -1;
- par_num = par_den = -1;
- gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
- gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);
-
- if (width == enc->width && height == enc->height && enc->format == format
- && fps_num == enc->fps_num && fps_den == enc->fps_den
- && par_num == enc->par_num && par_den == enc->par_den)
- return TRUE;
/* store input description */
- enc->format = format;
- enc->width = width;
- enc->height = height;
- enc->fps_num = fps_num;
- enc->fps_den = fps_den;
- enc->par_num = par_num;
- enc->par_den = par_den;
+ enc->info = info;
+
+ vinfo = info.finfo;
/* prepare a cached image description */
- enc->channels = 3 + (gst_video_format_has_alpha (format) ? 1 : 0);
+ enc->channels = 3 + (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (vinfo) ? 1 : 0);
/* ... but any alpha is disregarded in encoding */
- if (gst_video_format_is_gray (format))
+ if (GST_VIDEO_FORMAT_INFO_IS_GRAY (vinfo))
enc->channels = 1;
else
enc->channels = 3;
+
enc->h_max_samp = 0;
enc->v_max_samp = 0;
for (i = 0; i < enc->channels; ++i) {
- enc->cwidth[i] = gst_video_format_get_component_width (format, i, width);
- enc->cheight[i] = gst_video_format_get_component_height (format, i, height);
- enc->offset[i] = gst_video_format_get_component_offset (format, i, width,
- height);
- enc->stride[i] = gst_video_format_get_row_stride (format, i, width);
- enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
- enc->h_samp[i] = GST_ROUND_UP_4 (width) / enc->cwidth[i];
+ enc->cwidth[i] = GST_VIDEO_INFO_COMP_WIDTH (&info, i);
+ enc->cheight[i] = GST_VIDEO_INFO_COMP_HEIGHT (&info, i);
+ enc->inc[i] = GST_VIDEO_INFO_COMP_PSTRIDE (&info, i);
+
+ enc->h_samp[i] = GST_ROUND_UP_4 (info.width) / enc->cwidth[i];
enc->h_max_samp = MAX (enc->h_max_samp, enc->h_samp[i]);
- enc->v_samp[i] = GST_ROUND_UP_4 (height) / enc->cheight[i];
+ enc->v_samp[i] = GST_ROUND_UP_4 (info.height) / enc->cheight[i];
enc->v_max_samp = MAX (enc->v_max_samp, enc->v_samp[i]);
}
/* samp should only be 1, 2 or 4 */
othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (enc->srcpad));
gst_caps_set_simple (othercaps,
- "width", G_TYPE_INT, enc->width, "height", G_TYPE_INT, enc->height, NULL);
- if (enc->fps_den > 0)
+ "width", G_TYPE_INT, info.width, "height", G_TYPE_INT, info.height, NULL);
+ if (info.fps_d > 0)
gst_caps_set_simple (othercaps,
- "framerate", GST_TYPE_FRACTION, enc->fps_num, enc->fps_den, NULL);
- if (enc->par_den > 0)
+ "framerate", GST_TYPE_FRACTION, info.fps_n, info.fps_d, NULL);
+ if (info.par_d > 0)
gst_caps_set_simple (othercaps,
- "pixel-aspect-ratio", GST_TYPE_FRACTION, enc->par_num, enc->par_den,
- NULL);
+ "pixel-aspect-ratio", GST_TYPE_FRACTION, info.par_n, info.par_d, NULL);
ret = gst_pad_set_caps (enc->srcpad, othercaps);
gst_caps_unref (othercaps);
if (ret)
gst_jpegenc_resync (enc);
- gst_object_unref (enc);
-
return ret;
/* ERRORS */
refuse_caps:
{
GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
- gst_object_unref (enc);
return FALSE;
}
}
+static gboolean
+gst_jpegenc_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ gboolean res;
+ GstJpegEnc *enc = GST_JPEGENC (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ res = gst_jpegenc_setcaps (enc, caps);
+ gst_event_unref (event);
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return res;
+}
+
static void
gst_jpegenc_resync (GstJpegEnc * jpegenc)
{
gint width, height;
gint i, j;
+ const GstVideoFormatInfo *finfo;
GST_DEBUG_OBJECT (jpegenc, "resync");
- jpegenc->cinfo.image_width = width = jpegenc->width;
- jpegenc->cinfo.image_height = height = jpegenc->height;
+ finfo = jpegenc->info.finfo;
+
+ jpegenc->cinfo.image_width = width = GST_VIDEO_INFO_WIDTH (&jpegenc->info);
+ jpegenc->cinfo.image_height = height = GST_VIDEO_INFO_HEIGHT (&jpegenc->info);
jpegenc->cinfo.input_components = jpegenc->channels;
GST_DEBUG_OBJECT (jpegenc, "width %d, height %d", width, height);
- GST_DEBUG_OBJECT (jpegenc, "format %d", jpegenc->format);
+ GST_DEBUG_OBJECT (jpegenc, "format %d",
+ GST_VIDEO_INFO_FORMAT (&jpegenc->info));
- if (gst_video_format_is_rgb (jpegenc->format)) {
+ if (GST_VIDEO_FORMAT_INFO_IS_RGB (finfo)) {
GST_DEBUG_OBJECT (jpegenc, "RGB");
jpegenc->cinfo.in_color_space = JCS_RGB;
- } else if (gst_video_format_is_gray (jpegenc->format)) {
+ } else if (GST_VIDEO_FORMAT_INFO_IS_GRAY (finfo)) {
GST_DEBUG_OBJECT (jpegenc, "gray");
jpegenc->cinfo.in_color_space = JCS_GRAYSCALE;
} else {
}
/* input buffer size as max output */
- jpegenc->bufsize = gst_video_format_get_size (jpegenc->format, width, height);
+ jpegenc->bufsize = GST_VIDEO_INFO_SIZE (&jpegenc->info);
jpeg_set_defaults (&jpegenc->cinfo);
jpegenc->cinfo.raw_data_in = TRUE;
/* duh, libjpeg maps RGB to YUV ... and don't expect some conversion */
}
static GstFlowReturn
-gst_jpegenc_chain (GstPad * pad, GstBuffer * buf)
+gst_jpegenc_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstFlowReturn ret;
GstJpegEnc *jpegenc;
- guchar *data;
- gulong size;
guint height;
guchar *base[3], *end[3];
+ guint stride[3];
gint i, j, k;
+ GstBuffer *outbuf;
+ GstVideoFrame frame;
- jpegenc = GST_JPEGENC (GST_OBJECT_PARENT (pad));
+ jpegenc = GST_JPEGENC (parent);
- if (G_UNLIKELY (jpegenc->width <= 0 || jpegenc->height <= 0))
+ if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&jpegenc->info) ==
+ GST_VIDEO_FORMAT_UNKNOWN))
goto not_negotiated;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
-
- GST_LOG_OBJECT (jpegenc, "got buffer of %lu bytes", size);
+ if (!gst_video_frame_map (&frame, &jpegenc->info, buf, GST_MAP_READ))
+ goto invalid_frame;
- ret =
- gst_pad_alloc_buffer_and_set_caps (jpegenc->srcpad,
- GST_BUFFER_OFFSET_NONE, jpegenc->bufsize, GST_PAD_CAPS (jpegenc->srcpad),
- &jpegenc->output_buffer);
-
- if (ret != GST_FLOW_OK)
- goto done;
+ height = GST_VIDEO_FRAME_HEIGHT (&frame);
- gst_buffer_copy_metadata (jpegenc->output_buffer, buf,
- GST_BUFFER_COPY_TIMESTAMPS);
-
- height = jpegenc->height;
+ GST_LOG_OBJECT (jpegenc, "got buffer of %lu bytes",
+ gst_buffer_get_size (buf));
for (i = 0; i < jpegenc->channels; i++) {
- base[i] = data + jpegenc->offset[i];
- end[i] = base[i] + jpegenc->cheight[i] * jpegenc->stride[i];
+ base[i] = GST_VIDEO_FRAME_COMP_DATA (&frame, i);
+ stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, i);
+ end[i] = base[i] + GST_VIDEO_FRAME_COMP_HEIGHT (&frame, i) * stride[i];
}
- jpegenc->jdest.next_output_byte = GST_BUFFER_DATA (jpegenc->output_buffer);
- jpegenc->jdest.free_in_buffer = GST_BUFFER_SIZE (jpegenc->output_buffer);
+ jpegenc->output_mem = gst_allocator_alloc (NULL, jpegenc->bufsize, 3);
+ gst_memory_map (jpegenc->output_mem, &jpegenc->output_map, GST_MAP_READWRITE);
+
+ jpegenc->jdest.next_output_byte = jpegenc->output_map.data;
+ jpegenc->jdest.free_in_buffer = jpegenc->output_map.size;
/* prepare for raw input */
#if JPEG_LIB_VERSION >= 70
for (k = 0; k < jpegenc->channels; k++) {
for (j = 0; j < jpegenc->v_samp[k] * DCTSIZE; j++) {
jpegenc->line[k][j] = base[k];
- if (base[k] + jpegenc->stride[k] < end[k])
- base[k] += jpegenc->stride[k];
+ if (base[k] + stride[k] < end[k])
+ base[k] += stride[k];
}
}
jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line,
src += jpegenc->inc[k];
dst++;
}
- if (base[k] + jpegenc->stride[k] < end[k])
- base[k] += jpegenc->stride[k];
+ if (base[k] + stride[k] < end[k])
+ base[k] += stride[k];
}
}
jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line,
}
}
- /* This will ensure that gst_jpegenc_term_destination is called; we push
- the final output buffer from there */
+ /* This will ensure that gst_jpegenc_term_destination is called */
jpeg_finish_compress (&jpegenc->cinfo);
GST_LOG_OBJECT (jpegenc, "compressing done");
-done:
+ outbuf = gst_buffer_new ();
+ gst_buffer_copy_into (outbuf, buf, GST_BUFFER_COPY_METADATA, 0, -1);
+ gst_buffer_take_memory (outbuf, -1, jpegenc->output_mem);
+ jpegenc->output_mem = NULL;
+
+ ret = gst_pad_push (jpegenc->srcpad, outbuf);
+
+ gst_video_frame_unmap (&frame);
gst_buffer_unref (buf);
return ret;
-/* ERRORS */
+ /* ERRORS */
not_negotiated:
{
GST_WARNING_OBJECT (jpegenc, "no input format set (no caps on buffer)");
- ret = GST_FLOW_NOT_NEGOTIATED;
- goto done;
+ gst_buffer_unref (buf);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+invalid_frame:
+ {
+ GST_WARNING_OBJECT (jpegenc, "invalid frame received");
+ gst_buffer_unref (buf);
+ return GST_FLOW_OK;
}
}
typedef struct _GstJpegEnc GstJpegEnc;
typedef struct _GstJpegEncClass GstJpegEncClass;
-#define GST_JPEG_ENC_MAX_COMPONENT 4
-
struct _GstJpegEnc
{
GstElement element;
GstPad *sinkpad, *srcpad;
/* stream/image properties */
- GstVideoFormat format;
- gint width;
- gint height;
+ GstVideoInfo info;
gint channels;
- gint fps_num, fps_den;
- gint par_num, par_den;
+
/* standard video_format indexed */
- gint stride[GST_JPEG_ENC_MAX_COMPONENT];
- gint offset[GST_JPEG_ENC_MAX_COMPONENT];
- gint inc[GST_JPEG_ENC_MAX_COMPONENT];
- gint cwidth[GST_JPEG_ENC_MAX_COMPONENT];
- gint cheight[GST_JPEG_ENC_MAX_COMPONENT];
- gint h_samp[GST_JPEG_ENC_MAX_COMPONENT];
- gint v_samp[GST_JPEG_ENC_MAX_COMPONENT];
+ gint inc[GST_VIDEO_MAX_COMPONENTS];
+ gint cwidth[GST_VIDEO_MAX_COMPONENTS];
+ gint cheight[GST_VIDEO_MAX_COMPONENTS];
+ gint h_samp[GST_VIDEO_MAX_COMPONENTS];
+ gint v_samp[GST_VIDEO_MAX_COMPONENTS];
gint h_max_samp;
gint v_max_samp;
gboolean planar;
/* cached return state for any problems that may occur in callbacks */
GstFlowReturn last_ret;
- GstBuffer *output_buffer;
+ GstMemory *output_mem;
+ GstMapInfo output_map;
};
struct _GstJpegEncClass
{
GstElementClass parent_class;
-
- /* signals */
- void (*frame_encoded) (GstElement * element);
};
GType gst_jpegenc_get_type (void);
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class,
- &gst_smokedec_src_pad_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_smokedec_sink_pad_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_smokedec_src_pad_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_smokedec_sink_pad_template));
gst_element_class_set_details_simple (element_class, "Smoke video decoder",
"Codec/Decoder/Video",
"Decode video from Smoke format", "Wim Taymans <wim@fluendo.com>");
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class,
- &gst_smokeenc_sink_pad_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_smokeenc_src_pad_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_smokeenc_sink_pad_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_smokeenc_src_pad_template));
gst_element_class_set_details_simple (element_class, "Smoke video encoder",
"Codec/Encoder/Video",
"Encode images into the Smoke format", "Wim Taymans <wim@fluendo.com>");
$(GST_CFLAGS) \
$(LIBCACA_CFLAGS)
libgstcacasink_la_LIBADD = \
+ -lgstvideo-$(GST_MAJORMINOR) \
$(GST_BASE_LIBS) \
$(GST_LIBS) \
$(LIBCACA_LIBS)
#include "gstcacasink.h"
-#define GST_CACA_DEFAULT_SCREEN_WIDTH 80
-#define GST_CACA_DEFAULT_SCREEN_HEIGHT 25
-#define GST_CACA_DEFAULT_BPP 24
-#define GST_CACA_DEFAULT_RED_MASK GST_VIDEO_BYTE1_MASK_32_INT
-#define GST_CACA_DEFAULT_GREEN_MASK GST_VIDEO_BYTE2_MASK_32_INT
-#define GST_CACA_DEFAULT_BLUE_MASK GST_VIDEO_BYTE3_MASK_32_INT
//#define GST_CACA_DEFAULT_RED_MASK R_MASK_32_REVERSE_INT
//#define GST_CACA_DEFAULT_GREEN_MASK G_MASK_32_REVERSE_INT
LAST_SIGNAL
};
+#define GST_CACA_DEFAULT_SCREEN_WIDTH 80
+#define GST_CACA_DEFAULT_SCREEN_HEIGHT 25
+#define GST_CACA_DEFAULT_DITHER CACA_DITHERING_NONE
+#define GST_CACA_DEFAULT_ANTIALIASING TRUE
enum
{
- ARG_0,
- ARG_SCREEN_WIDTH,
- ARG_SCREEN_HEIGHT,
- ARG_DITHER,
- ARG_ANTIALIASING
+ PROP_0,
+ PROP_SCREEN_WIDTH,
+ PROP_SCREEN_HEIGHT,
+ PROP_DITHER,
+ PROP_ANTIALIASING
};
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_RGB_16 ";" GST_VIDEO_CAPS_RGB_15)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGB, RGBx, RGB16, RGB15 }"))
);
-static void gst_cacasink_base_init (gpointer g_class);
-static void gst_cacasink_class_init (GstCACASinkClass * klass);
-static void gst_cacasink_init (GstCACASink * cacasink);
-
static gboolean gst_cacasink_setcaps (GstBaseSink * pad, GstCaps * caps);
static void gst_cacasink_get_times (GstBaseSink * sink, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end);
static GstStateChangeReturn gst_cacasink_change_state (GstElement * element,
GstStateChange transition);
-static GstElementClass *parent_class = NULL;
-
-GType
-gst_cacasink_get_type (void)
-{
- static GType cacasink_type = 0;
-
- if (!cacasink_type) {
- static const GTypeInfo cacasink_info = {
- sizeof (GstCACASinkClass),
- gst_cacasink_base_init,
- NULL,
- (GClassInitFunc) gst_cacasink_class_init,
- NULL,
- NULL,
- sizeof (GstCACASink),
- 0,
- (GInstanceInitFunc) gst_cacasink_init,
- };
-
- cacasink_type =
- g_type_register_static (GST_TYPE_BASE_SINK, "GstCACASink",
- &cacasink_info, 0);
- }
- return cacasink_type;
-}
+#define gst_cacasink_parent_class parent_class
+G_DEFINE_TYPE (GstCACASink, gst_cacasink, GST_TYPE_BASE_SINK);
#define GST_TYPE_CACADITHER (gst_cacasink_dither_get_type())
static GType
}
static void
-gst_cacasink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "A colored ASCII art video sink", "Sink/Video",
- "A colored ASCII art videosink", "Zeeshan Ali <zak147@yahoo.com>");
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
-}
-
-static void
gst_cacasink_class_init (GstCACASinkClass * klass)
{
GObjectClass *gobject_class;
gobject_class->set_property = gst_cacasink_set_property;
gobject_class->get_property = gst_cacasink_get_property;
- gstelement_class->change_state = gst_cacasink_change_state;
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SCREEN_WIDTH,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SCREEN_WIDTH,
g_param_spec_int ("screen-width", "Screen Width",
"The width of the screen", 0, G_MAXINT, GST_CACA_DEFAULT_SCREEN_WIDTH,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SCREEN_HEIGHT,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SCREEN_HEIGHT,
g_param_spec_int ("screen-height", "Screen Height",
"The height of the screen", 0, G_MAXINT,
GST_CACA_DEFAULT_SCREEN_HEIGHT,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DITHER,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DITHER,
g_param_spec_enum ("dither", "Dither Type", "Set type of Dither",
- GST_TYPE_CACADITHER, CACA_DITHERING_NONE,
+ GST_TYPE_CACADITHER, GST_CACA_DEFAULT_DITHER,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ANTIALIASING,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_ANTIALIASING,
g_param_spec_boolean ("anti-aliasing", "Anti Aliasing",
- "Enables Anti-Aliasing", TRUE,
+ "Enables Anti-Aliasing", GST_CACA_DEFAULT_ANTIALIASING,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gstelement_class->change_state = gst_cacasink_change_state;
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "A colored ASCII art video sink", "Sink/Video",
+ "A colored ASCII art videosink", "Zeeshan Ali <zak147@yahoo.com>");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_cacasink_setcaps);
gstbasesink_class->get_times = GST_DEBUG_FUNCPTR (gst_cacasink_get_times);
gstbasesink_class->preroll = GST_DEBUG_FUNCPTR (gst_cacasink_render);
gst_cacasink_setcaps (GstBaseSink * basesink, GstCaps * caps)
{
GstCACASink *cacasink;
- GstStructure *structure;
- gint endianness;
+ GstVideoInfo info;
+ guint bpp, red_mask, green_mask, blue_mask;
cacasink = GST_CACASINK (basesink);
- structure = gst_caps_get_structure (caps, 0);
- gst_structure_get_int (structure, "width", &(cacasink->width));
- gst_structure_get_int (structure, "height", &(cacasink->height));
- gst_structure_get_int (structure, "endianness", &endianness);
- gst_structure_get_int (structure, "bpp", (int *) &cacasink->bpp);
- gst_structure_get_int (structure, "red_mask", (int *) &cacasink->red_mask);
- gst_structure_get_int (structure, "green_mask",
- (int *) &cacasink->green_mask);
- gst_structure_get_int (structure, "blue_mask", (int *) &cacasink->blue_mask);
-
- if (cacasink->bpp == 24) {
- cacasink->red_mask = GUINT32_FROM_BE (cacasink->red_mask) >> 8;
- cacasink->green_mask = GUINT32_FROM_BE (cacasink->green_mask) >> 8;
- cacasink->blue_mask = GUINT32_FROM_BE (cacasink->blue_mask) >> 8;
- }
+ if (!gst_video_info_from_caps (&info, caps))
+ goto caps_error;
- else if (cacasink->bpp == 32) {
- cacasink->red_mask = GUINT32_FROM_BE (cacasink->red_mask);
- cacasink->green_mask = GUINT32_FROM_BE (cacasink->green_mask);
- cacasink->blue_mask = GUINT32_FROM_BE (cacasink->blue_mask);
- }
- else if (cacasink->bpp == 16) {
- if (endianness == G_BIG_ENDIAN) {
- cacasink->red_mask = GUINT16_FROM_BE (cacasink->red_mask);
- cacasink->green_mask = GUINT16_FROM_BE (cacasink->green_mask);
- cacasink->blue_mask = GUINT16_FROM_BE (cacasink->blue_mask);
- } else {
- cacasink->red_mask = GUINT16_FROM_LE (cacasink->red_mask);
- cacasink->green_mask = GUINT16_FROM_LE (cacasink->green_mask);
- cacasink->blue_mask = GUINT16_FROM_LE (cacasink->blue_mask);
- }
+ switch (GST_VIDEO_INFO_FORMAT (&info)) {
+ case GST_VIDEO_FORMAT_RGB:
+ bpp = 24;
+ red_mask = 0xff0000;
+ green_mask = 0x00ff00;
+ blue_mask = 0x0000ff;
+ break;
+ case GST_VIDEO_FORMAT_RGBx:
+ bpp = 32;
+ red_mask = 0xff000000;
+ green_mask = 0x00ff0000;
+ blue_mask = 0x0000ff00;
+ break;
+ case GST_VIDEO_FORMAT_RGB16:
+ bpp = 16;
+ red_mask = 0xf800;
+ green_mask = 0x07e0;
+ blue_mask = 0x001f;
+ break;
+ case GST_VIDEO_FORMAT_RGB15:
+ bpp = 16;
+ red_mask = 0x7c00;
+ green_mask = 0x03e0;
+ blue_mask = 0x001f;
+ break;
+ default:
+ goto invalid_format;
}
if (cacasink->bitmap) {
caca_free_bitmap (cacasink->bitmap);
}
+ cacasink->bitmap = caca_create_bitmap (bpp,
+ GST_VIDEO_INFO_WIDTH (&info),
+ GST_VIDEO_INFO_HEIGHT (&info),
+ GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (&info) * bpp / 8),
+ red_mask, green_mask, blue_mask, 0);
+ if (!cacasink->bitmap)
+ goto no_bitmap;
- cacasink->bitmap = caca_create_bitmap (cacasink->bpp,
- cacasink->width,
- cacasink->height,
- GST_ROUND_UP_4 (cacasink->width * cacasink->bpp / 8),
- cacasink->red_mask, cacasink->green_mask, cacasink->blue_mask, 0);
+ cacasink->info = info;
- if (!cacasink->bitmap) {
+ return TRUE;
+
+ /* ERROS */
+caps_error:
+ {
+ GST_ERROR_OBJECT (cacasink, "error parsing caps");
+ return FALSE;
+ }
+invalid_format:
+ {
+ GST_ERROR_OBJECT (cacasink, "invalid format");
+ return FALSE;
+ }
+no_bitmap:
+ {
+ GST_ERROR_OBJECT (cacasink, "could not create bitmap");
return FALSE;
}
-
- return TRUE;
}
static void
{
cacasink->screen_width = GST_CACA_DEFAULT_SCREEN_WIDTH;
cacasink->screen_height = GST_CACA_DEFAULT_SCREEN_HEIGHT;
- cacasink->bpp = GST_CACA_DEFAULT_BPP;
- cacasink->red_mask = GST_CACA_DEFAULT_RED_MASK;
- cacasink->green_mask = GST_CACA_DEFAULT_GREEN_MASK;
- cacasink->blue_mask = GST_CACA_DEFAULT_BLUE_MASK;
+
+ cacasink->dither = GST_CACA_DEFAULT_DITHER;
+ cacasink->antialiasing = GST_CACA_DEFAULT_ANTIALIASING;
}
static GstFlowReturn
gst_cacasink_render (GstBaseSink * basesink, GstBuffer * buffer)
{
GstCACASink *cacasink = GST_CACASINK (basesink);
+ GstVideoFrame frame;
GST_DEBUG ("render");
+ if (!gst_video_frame_map (&frame, &cacasink->info, buffer, GST_MAP_READ))
+ goto invalid_frame;
+
caca_clear ();
caca_draw_bitmap (0, 0, cacasink->screen_width - 1,
- cacasink->screen_height - 1, cacasink->bitmap, GST_BUFFER_DATA (buffer));
+ cacasink->screen_height - 1, cacasink->bitmap,
+ GST_VIDEO_FRAME_PLANE_DATA (&frame, 0));
caca_refresh ();
+ gst_video_frame_unmap (&frame);
+
return GST_FLOW_OK;
+
+ /* ERRORS */
+invalid_frame:
+ {
+ GST_ERROR_OBJECT (cacasink, "invalid frame received");
+ return GST_FLOW_ERROR;
+ }
}
static void
cacasink = GST_CACASINK (object);
switch (prop_id) {
- case ARG_DITHER:{
+ case PROP_DITHER:{
cacasink->dither = g_value_get_enum (value);
caca_set_dithering (cacasink->dither + CACA_DITHERING_NONE);
break;
}
- case ARG_ANTIALIASING:{
+ case PROP_ANTIALIASING:{
cacasink->antialiasing = g_value_get_boolean (value);
if (cacasink->antialiasing) {
caca_set_feature (CACA_ANTIALIASING_MAX);
cacasink = GST_CACASINK (object);
switch (prop_id) {
- case ARG_SCREEN_WIDTH:{
+ case PROP_SCREEN_WIDTH:{
g_value_set_int (value, cacasink->screen_width);
break;
}
- case ARG_SCREEN_HEIGHT:{
+ case PROP_SCREEN_HEIGHT:{
g_value_set_int (value, cacasink->screen_height);
break;
}
- case ARG_DITHER:{
+ case PROP_DITHER:{
g_value_set_enum (value, cacasink->dither);
break;
}
- case ARG_ANTIALIASING:{
+ case PROP_ANTIALIASING:{
g_value_set_boolean (value, cacasink->antialiasing);
break;
}
{
cacasink->bitmap = NULL;
- if (caca_init () < 0) {
- GST_ELEMENT_ERROR (cacasink, RESOURCE, OPEN_WRITE, (NULL),
- ("caca_init() failed"));
- return FALSE;
- }
+ if (caca_init () < 0)
+ goto init_failed;
cacasink->screen_width = caca_get_width ();
cacasink->screen_height = caca_get_height ();
caca_set_dithering (CACA_DITHERING_NONE);
return TRUE;
+
+ /* ERRORS */
+init_failed:
+ {
+ GST_ELEMENT_ERROR (cacasink, RESOURCE, OPEN_WRITE, (NULL),
+ ("caca_init() failed"));
+ return FALSE;
+ }
}
static void
struct _GstCACASink {
GstBaseSink parent;
- gint width, height;
+ GstVideoInfo info;
gint screen_width, screen_height;
- guint bpp;
+
guint dither;
gboolean antialiasing;
- guint red_mask, green_mask, blue_mask;
struct caca_bitmap *bitmap;
};
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class,
- &gst_mng_dec_src_pad_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_mng_dec_sink_pad_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_mng_dec_src_pad_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_mng_dec_sink_pad_template));
gst_element_class_set_details_simple (element_class, "MNG video decoder",
"Codec/Decoder/Video",
"Decode a mng video to raw images", "Wim Taymans <wim@fluendo.com>");
gst_element_class_set_details_simple (element_class, "MNG video encoder",
"Codec/Encoder/Video",
"Encode a video frame to an .mng video", "Wim Taymans <wim@fluendo.com>");
-
- gst_object_unref (mngenc_src_template);
- gst_object_unref (mngenc_sink_template);
}
static void
GST_DEBUG_CATEGORY_STATIC (pngdec_debug);
#define GST_CAT_DEFAULT pngdec_debug
-static void gst_pngdec_base_init (gpointer g_class);
-static void gst_pngdec_class_init (GstPngDecClass * klass);
-static void gst_pngdec_init (GstPngDec * pngdec);
-
static gboolean gst_pngdec_libpng_init (GstPngDec * pngdec);
static gboolean gst_pngdec_libpng_clear (GstPngDec * pngdec);
static GstStateChangeReturn gst_pngdec_change_state (GstElement * element,
GstStateChange transition);
-static gboolean gst_pngdec_sink_activate_push (GstPad * sinkpad,
- gboolean active);
-static gboolean gst_pngdec_sink_activate_pull (GstPad * sinkpad,
- gboolean active);
-static gboolean gst_pngdec_sink_activate (GstPad * sinkpad);
+static gboolean gst_pngdec_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+static gboolean gst_pngdec_sink_activate (GstPad * sinkpad, GstObject * parent);
static GstFlowReturn gst_pngdec_caps_create_and_set (GstPngDec * pngdec);
static void gst_pngdec_task (GstPad * pad);
-static GstFlowReturn gst_pngdec_chain (GstPad * pad, GstBuffer * buffer);
-static gboolean gst_pngdec_sink_event (GstPad * pad, GstEvent * event);
-static gboolean gst_pngdec_sink_setcaps (GstPad * pad, GstCaps * caps);
-
-static GstElementClass *parent_class = NULL;
+static GstFlowReturn gst_pngdec_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static gboolean gst_pngdec_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_pngdec_sink_setcaps (GstPngDec * pngdec, GstCaps * caps);
-GType
-gst_pngdec_get_type (void)
-{
- static GType pngdec_type = 0;
-
- if (!pngdec_type) {
- static const GTypeInfo pngdec_info = {
- sizeof (GstPngDecClass),
- gst_pngdec_base_init,
- NULL,
- (GClassInitFunc) gst_pngdec_class_init,
- NULL,
- NULL,
- sizeof (GstPngDec),
- 0,
- (GInstanceInitFunc) gst_pngdec_init,
- };
-
- pngdec_type = g_type_register_static (GST_TYPE_ELEMENT, "GstPngDec",
- &pngdec_info, 0);
- }
- return pngdec_type;
-}
+static GstFlowReturn gst_pngdec_negotiate_pool (GstPngDec * dec,
+ GstCaps * caps, GstVideoInfo * info);
static GstStaticPadTemplate gst_pngdec_src_pad_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_RGB ";"
- GST_VIDEO_CAPS_ARGB_64)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBA, RGB }"))
);
static GstStaticPadTemplate gst_pngdec_sink_pad_template =
GST_STATIC_CAPS ("image/png")
);
-static void
-gst_pngdec_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_pngdec_src_pad_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_pngdec_sink_pad_template);
- gst_element_class_set_details_simple (element_class, "PNG image decoder",
- "Codec/Decoder/Image",
- "Decode a png video frame to a raw image",
- "Wim Taymans <wim@fluendo.com>");
-}
+#define gst_pngdec_parent_class parent_class
+G_DEFINE_TYPE (GstPngDec, gst_pngdec, GST_TYPE_ELEMENT);
static void
gst_pngdec_class_init (GstPngDecClass * klass)
gstelement_class = (GstElementClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
+ gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_pngdec_change_state);
- gstelement_class->change_state = gst_pngdec_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_pngdec_src_pad_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_pngdec_sink_pad_template));
+ gst_element_class_set_details_simple (gstelement_class, "PNG image decoder",
+ "Codec/Decoder/Image",
+ "Decode a png video frame to a raw image",
+ "Wim Taymans <wim@fluendo.com>");
GST_DEBUG_CATEGORY_INIT (pngdec_debug, "pngdec", 0, "PNG image decoder");
}
{
pngdec->sinkpad =
gst_pad_new_from_static_template (&gst_pngdec_sink_pad_template, "sink");
- gst_pad_set_activate_function (pngdec->sinkpad, gst_pngdec_sink_activate);
- gst_pad_set_activatepush_function (pngdec->sinkpad,
- gst_pngdec_sink_activate_push);
- gst_pad_set_activatepull_function (pngdec->sinkpad,
- gst_pngdec_sink_activate_pull);
- gst_pad_set_chain_function (pngdec->sinkpad, gst_pngdec_chain);
- gst_pad_set_event_function (pngdec->sinkpad, gst_pngdec_sink_event);
- gst_pad_set_setcaps_function (pngdec->sinkpad, gst_pngdec_sink_setcaps);
+ gst_pad_set_activate_function (pngdec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_pngdec_sink_activate));
+ gst_pad_set_activatemode_function (pngdec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_pngdec_sink_activate_mode));
+ gst_pad_set_chain_function (pngdec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_pngdec_chain));
+ gst_pad_set_event_function (pngdec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_pngdec_sink_event));
gst_element_add_pad (GST_ELEMENT (pngdec), pngdec->sinkpad);
pngdec->srcpad =
pngdec->color_type = -1;
pngdec->width = -1;
pngdec->height = -1;
- pngdec->bpp = -1;
pngdec->fps_n = 0;
pngdec->fps_d = 1;
{
GstPngDec *pngdec = NULL;
GstFlowReturn ret = GST_FLOW_OK;
- size_t buffer_size;
GstBuffer *buffer = NULL;
pngdec = GST_PNGDEC (png_get_io_ptr (png_ptr));
goto beach;
}
- /* Allocate output buffer */
- pngdec->rowbytes = png_get_rowbytes (pngdec->png, pngdec->info);
- if (pngdec->rowbytes > (G_MAXUINT32 - 3)
- || pngdec->height > G_MAXUINT32 / pngdec->rowbytes) {
- ret = GST_FLOW_ERROR;
- goto beach;
+ if (gst_pad_check_reconfigure (pngdec->srcpad)) {
+ GstCaps *caps;
+
+ caps = gst_pad_get_current_caps (pngdec->srcpad);
+ gst_pngdec_negotiate_pool (pngdec, caps, &pngdec->vinfo);
+ gst_caps_unref (caps);
}
- pngdec->rowbytes = GST_ROUND_UP_4 (pngdec->rowbytes);
- buffer_size = pngdec->height * pngdec->rowbytes;
- ret =
- gst_pad_alloc_buffer_and_set_caps (pngdec->srcpad, GST_BUFFER_OFFSET_NONE,
- buffer_size, GST_PAD_CAPS (pngdec->srcpad), &buffer);
+ /* Allocate output buffer */
+ g_assert (pngdec->pool);
+ ret = gst_buffer_pool_acquire_buffer (pngdec->pool, &buffer, NULL);
if (ret != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (pngdec, "failed to acquire buffer");
+ ret = GST_FLOW_ERROR;
goto beach;
}
/* If buffer_out doesn't exist, it means buffer_alloc failed, which
* will already have set the return code */
if (GST_IS_BUFFER (pngdec->buffer_out)) {
- size_t offset = row_num * pngdec->rowbytes;
+ GstVideoFrame frame;
+ GstBuffer *buffer = pngdec->buffer_out;
+ size_t offset;
+ gint width;
+ guint8 *data;
+
+ if (!gst_video_frame_map (&frame, &pngdec->vinfo, buffer, GST_MAP_WRITE)) {
+ pngdec->ret = GST_FLOW_ERROR;
+ return;
+ }
+ data = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
+ offset = row_num * GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
GST_LOG ("got row %u, copying in buffer %p at offset %" G_GSIZE_FORMAT,
(guint) row_num, pngdec->buffer_out, offset);
- memcpy (GST_BUFFER_DATA (pngdec->buffer_out) + offset, new_row,
- pngdec->rowbytes);
+ width = GST_ROUND_UP_4 (png_get_rowbytes (pngdec->png, pngdec->info));
+ memcpy (data + offset, new_row, width);
+ gst_video_frame_unmap (&frame);
pngdec->ret = GST_FLOW_OK;
}
}
buffer_clip (GstPngDec * dec, GstBuffer * buffer)
{
gboolean res = TRUE;
- gint64 cstart, cstop;
-
+ guint64 cstart, cstop;
if ((!GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (buffer))) ||
(!GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer))) ||
if (ret != GST_FLOW_OK)
goto pause;
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
if (size != length)
goto short_buffer;
- memcpy (data, GST_BUFFER_DATA (buffer), size);
-
+ gst_buffer_extract (buffer, 0, data, size);
gst_buffer_unref (buffer);
pngdec->offset += length;
GST_INFO_OBJECT (pngdec, "pausing task, reason %s",
gst_flow_get_name (ret));
gst_pad_pause_task (pngdec->sinkpad);
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
- } else if (ret < GST_FLOW_UNEXPECTED || ret == GST_FLOW_NOT_LINKED) {
+ } else if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED) {
GST_ELEMENT_ERROR (pngdec, STREAM, FAILED,
(_("Internal data stream error.")),
("stream stopped, reason %s", gst_flow_get_name (ret)));
}
static GstFlowReturn
+gst_pngdec_negotiate_pool (GstPngDec * dec, GstCaps * caps, GstVideoInfo * info)
+{
+ GstQuery *query;
+ GstBufferPool *pool = NULL;
+ guint size, min, max, prefix, alignment;
+ GstStructure *config;
+
+ /* find a pool for the negotiated caps now */
+ query = gst_query_new_allocation (caps, TRUE);
+
+ if (gst_pad_peer_query (dec->srcpad, query)) {
+ GST_DEBUG_OBJECT (dec, "got downstream ALLOCATION hints");
+ /* we got configuration from our peer, parse them */
+ gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
+ &alignment, &pool);
+ size = MAX (size, info->size);
+ } else {
+ GST_DEBUG_OBJECT (dec, "didn't get downstream ALLOCATION hints");
+ size = info->size;
+ min = max = 0;
+ prefix = 0;
+ alignment = 0;
+ }
+
+ if (pool == NULL) {
+ /* we did not get a pool, make one ourselves then */
+ pool = gst_buffer_pool_new ();
+ }
+
+ if (dec->pool)
+ gst_object_unref (dec->pool);
+ dec->pool = pool;
+
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set (config, caps, size, min, max, prefix, alignment);
+ /* just set the option, if the pool can support it we will transparently use
+ * it through the video info API. We could also see if the pool support this
+ * option and only activate it then. */
+ gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+
+ gst_buffer_pool_set_config (pool, config);
+ /* and activate */
+ gst_buffer_pool_set_active (pool, TRUE);
+
+ gst_query_unref (query);
+
+ return GST_FLOW_OK;
+}
+
+static GstFlowReturn
gst_pngdec_caps_create_and_set (GstPngDec * pngdec)
{
GstFlowReturn ret = GST_FLOW_OK;
GstPadTemplate *templ = NULL;
gint bpc = 0, color_type;
png_uint_32 width, height;
+ GstVideoFormat format;
+ GstVideoInfo vinfo = { 0, };
g_return_val_if_fail (GST_IS_PNGDEC (pngdec), GST_FLOW_ERROR);
switch (pngdec->color_type) {
case PNG_COLOR_TYPE_RGB:
GST_LOG_OBJECT (pngdec, "we have no alpha channel, depth is 24 bits");
- pngdec->bpp = 3 * bpc;
+ format = GST_VIDEO_FORMAT_RGB;
break;
case PNG_COLOR_TYPE_RGB_ALPHA:
GST_LOG_OBJECT (pngdec, "we have an alpha channel, depth is 32 bits");
- pngdec->bpp = 4 * bpc;
+ format = GST_VIDEO_FORMAT_RGBA;
break;
default:
GST_ELEMENT_ERROR (pngdec, STREAM, NOT_IMPLEMENTED, (NULL),
goto beach;
}
- caps = gst_caps_new_simple ("video/x-raw-rgb",
- "width", G_TYPE_INT, pngdec->width,
- "height", G_TYPE_INT, pngdec->height,
- "bpp", G_TYPE_INT, pngdec->bpp,
- "framerate", GST_TYPE_FRACTION, pngdec->fps_n, pngdec->fps_d, NULL);
+ gst_video_info_set_format (&vinfo, format, pngdec->width, pngdec->height);
+ vinfo.fps_n = pngdec->fps_n;
+ vinfo.fps_d = pngdec->fps_d;
+ vinfo.par_n = 1;
+ vinfo.par_d = 1;
+
+ if (memcmp (&vinfo, &pngdec->vinfo, sizeof (vinfo)) == 0) {
+ GST_DEBUG_OBJECT (pngdec, "video info unchanged, skip negotiation");
+ ret = GST_FLOW_OK;
+ goto beach;
+ }
+
+ pngdec->vinfo = vinfo;
+
+ caps = gst_video_info_to_caps (&pngdec->vinfo);
templ = gst_static_pad_template_get (&gst_pngdec_src_pad_template);
if (!gst_pad_set_caps (pngdec->srcpad, res))
ret = GST_FLOW_NOT_NEGOTIATED;
+ /* clear pending reconfigure */
+ gst_pad_check_reconfigure (pngdec->srcpad);
+
GST_DEBUG_OBJECT (pngdec, "our caps %" GST_PTR_FORMAT, res);
+ gst_pngdec_negotiate_pool (pngdec, res, &pngdec->vinfo);
gst_caps_unref (res);
/* Push a newsegment event */
if (pngdec->need_newsegment) {
+ gst_segment_init (&pngdec->segment, GST_FORMAT_TIME);
gst_pad_push_event (pngdec->srcpad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
+ gst_event_new_segment (&pngdec->segment));
pngdec->need_newsegment = FALSE;
}
{
GstPngDec *pngdec;
GstBuffer *buffer = NULL;
- size_t buffer_size = 0;
gint i = 0;
- png_bytep *rows, inp;
- png_uint_32 rowbytes;
+ png_bytep *rows, inp = NULL;
GstFlowReturn ret = GST_FLOW_OK;
+ GstVideoFrame frame;
pngdec = GST_PNGDEC (GST_OBJECT_PARENT (pad));
/* Read info */
png_read_info (pngdec->png, pngdec->info);
+ pngdec->fps_n = 0;
+ pngdec->fps_d = 1;
+
/* Generate the caps and configure */
ret = gst_pngdec_caps_create_and_set (pngdec);
if (ret != GST_FLOW_OK) {
}
/* Allocate output buffer */
- rowbytes = png_get_rowbytes (pngdec->png, pngdec->info);
- if (rowbytes > (G_MAXUINT32 - 3) || pngdec->height > G_MAXUINT32 / rowbytes) {
- ret = GST_FLOW_ERROR;
- goto pause;
- }
- rowbytes = GST_ROUND_UP_4 (rowbytes);
- buffer_size = pngdec->height * rowbytes;
- ret =
- gst_pad_alloc_buffer_and_set_caps (pngdec->srcpad, GST_BUFFER_OFFSET_NONE,
- buffer_size, GST_PAD_CAPS (pngdec->srcpad), &buffer);
+ g_assert (pngdec->pool);
+ ret = gst_buffer_pool_acquire_buffer (pngdec->pool, &buffer, NULL);
if (ret != GST_FLOW_OK)
goto pause;
rows = (png_bytep *) g_malloc (sizeof (png_bytep) * pngdec->height);
- inp = GST_BUFFER_DATA (buffer);
+ if (!gst_video_frame_map (&frame, &pngdec->vinfo, buffer, GST_MAP_WRITE))
+ goto invalid_frame;
+
+ inp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
for (i = 0; i < pngdec->height; i++) {
rows[i] = inp;
- inp += rowbytes;
+ inp += GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
}
/* Read the actual picture */
png_read_image (pngdec->png, rows);
g_free (rows);
+ gst_video_frame_unmap (&frame);
+ inp = NULL;
+
/* Push the raw RGB frame */
ret = gst_pad_push (pngdec->srcpad, buffer);
+ buffer = NULL;
if (ret != GST_FLOW_OK)
goto pause;
pause:
{
+ if (inp)
+ gst_video_frame_unmap (&frame);
+ if (buffer)
+ gst_buffer_unref (buffer);
GST_INFO_OBJECT (pngdec, "pausing task, reason %s",
gst_flow_get_name (ret));
gst_pad_pause_task (pngdec->sinkpad);
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
- } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
GST_ELEMENT_ERROR (pngdec, STREAM, FAILED,
(_("Internal data stream error.")),
("stream stopped, reason %s", gst_flow_get_name (ret)));
gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
}
}
+invalid_frame:
+ {
+ GST_DEBUG_OBJECT (pngdec, "could not map video frame");
+ ret = GST_FLOW_ERROR;
+ goto pause;
+ }
}
static GstFlowReturn
-gst_pngdec_chain (GstPad * pad, GstBuffer * buffer)
+gst_pngdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstPngDec *pngdec;
GstFlowReturn ret = GST_FLOW_OK;
+ GstMapInfo map = GST_MAP_INFO_INIT;
- pngdec = GST_PNGDEC (gst_pad_get_parent (pad));
-
- GST_LOG_OBJECT (pngdec, "Got buffer, size=%u", GST_BUFFER_SIZE (buffer));
+ pngdec = GST_PNGDEC (parent);
if (G_UNLIKELY (!pngdec->setup))
goto not_configured;
pngdec->in_timestamp = GST_BUFFER_TIMESTAMP (buffer);
pngdec->in_duration = GST_BUFFER_DURATION (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ GST_LOG_OBJECT (pngdec, "Got buffer, size=%d", (gint) map.size);
+
/* Progressive loading of the PNG image */
- png_process_data (pngdec->png, pngdec->info, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ png_process_data (pngdec->png, pngdec->info, map.data, map.size);
if (pngdec->image_ready) {
if (pngdec->framed) {
/* grab new return code */
ret = pngdec->ret;
+beach:
+ if (G_LIKELY (map.data))
+ gst_buffer_unmap (buffer, &map);
+
/* And release the buffer */
gst_buffer_unref (buffer);
-beach:
- gst_object_unref (pngdec);
-
return ret;
/* ERRORS */
not_configured:
{
GST_LOG_OBJECT (pngdec, "we are not configured yet");
- ret = GST_FLOW_WRONG_STATE;
+ ret = GST_FLOW_FLUSHING;
goto beach;
}
}
static gboolean
-gst_pngdec_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_pngdec_sink_setcaps (GstPngDec * pngdec, GstCaps * caps)
{
GstStructure *s;
- GstPngDec *pngdec;
gint num, denom;
- pngdec = GST_PNGDEC (gst_pad_get_parent (pad));
-
s = gst_caps_get_structure (caps, 0);
if (gst_structure_get_fraction (s, "framerate", &num, &denom)) {
GST_DEBUG_OBJECT (pngdec, "framed input");
pngdec->fps_d = 1;
}
- gst_object_unref (pngdec);
return TRUE;
}
static gboolean
-gst_pngdec_sink_event (GstPad * pad, GstEvent * event)
+gst_pngdec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstPngDec *pngdec;
gboolean res;
- pngdec = GST_PNGDEC (gst_pad_get_parent (pad));
+ pngdec = GST_PNGDEC (parent);
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:{
- gdouble rate, arate;
- gboolean update;
- gint64 start, stop, position;
- GstFormat fmt;
+ case GST_EVENT_SEGMENT:{
+ gst_event_copy_segment (event, &pngdec->segment);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &fmt,
- &start, &stop, &position);
+ GST_LOG_OBJECT (pngdec, "SEGMENT %" GST_SEGMENT_FORMAT, &pngdec->segment);
- gst_segment_set_newsegment_full (&pngdec->segment, update, rate, arate,
- fmt, start, stop, position);
-
- GST_LOG_OBJECT (pngdec, "NEWSEGMENT (%s)", gst_format_get_name (fmt));
-
- if (fmt == GST_FORMAT_TIME) {
+ if (pngdec->segment.format == GST_FORMAT_TIME) {
pngdec->need_newsegment = FALSE;
res = gst_pad_push_event (pngdec->srcpad, event);
} else {
{
GST_LOG_OBJECT (pngdec, "EOS");
gst_pngdec_libpng_clear (pngdec);
- pngdec->ret = GST_FLOW_UNEXPECTED;
+ pngdec->ret = GST_FLOW_EOS;
res = gst_pad_push_event (pngdec->srcpad, event);
break;
}
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ res = gst_pngdec_sink_setcaps (pngdec, caps);
+ gst_event_unref (event);
+ break;
+ }
default:
res = gst_pad_push_event (pngdec->srcpad, event);
break;
}
- gst_object_unref (pngdec);
return res;
}
pngdec->endinfo = NULL;
}
- pngdec->bpp = pngdec->color_type = pngdec->height = pngdec->width = -1;
+ pngdec->color_type = pngdec->height = pngdec->width = -1;
pngdec->offset = 0;
- pngdec->rowbytes = 0;
pngdec->buffer_out = NULL;
pngdec->setup = FALSE;
break;
}
- ret = parent_class->change_state (element, transition);
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
if (ret != GST_STATE_CHANGE_SUCCESS)
return ret;
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_pngdec_libpng_clear (pngdec);
+ if (pngdec->pool)
+ gst_object_unref (pngdec->pool);
break;
default:
break;
return ret;
}
-/* this function gets called when we activate ourselves in push mode. */
+/* this function gets called when we activate ourselves in pull mode.
+ * We can perform random access to the resource and we start a task
+ * to start reading */
static gboolean
-gst_pngdec_sink_activate_push (GstPad * sinkpad, gboolean active)
+gst_pngdec_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
{
- GstPngDec *pngdec;
-
- pngdec = GST_PNGDEC (GST_OBJECT_PARENT (sinkpad));
-
- pngdec->ret = GST_FLOW_OK;
-
- if (active) {
- /* Let libpng come back here on error */
- if (setjmp (png_jmpbuf (pngdec->png)))
- goto setup_failed;
+ GstPngDec *pngdec = GST_PNGDEC (parent);
+ gboolean res;
- GST_LOG ("setting up progressive loading callbacks");
- png_set_progressive_read_fn (pngdec->png, pngdec,
- user_info_callback, user_endrow_callback, user_end_callback);
+ switch (mode) {
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_pngdec_task,
+ sinkpad);
+ } else {
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ case GST_PAD_MODE_PUSH:
+ GST_DEBUG_OBJECT (pngdec, "activating push/chain function");
+ if (active) {
+ pngdec->ret = GST_FLOW_OK;
+
+ /* Let libpng come back here on error */
+ if (setjmp (png_jmpbuf (pngdec->png)))
+ goto setup_failed;
+
+ GST_LOG_OBJECT (pngdec, "setting up progressive loading callbacks");
+ png_set_progressive_read_fn (pngdec->png, pngdec,
+ user_info_callback, user_endrow_callback, user_end_callback);
+ } else {
+ GST_DEBUG_OBJECT (pngdec, "deactivating push/chain function");
+ }
+ res = TRUE;
+ break;
+ default:
+ res = FALSE;
+ break;
}
- return TRUE;
+ return res;
setup_failed:
{
- GST_LOG ("failed setting up libpng jmpbuf");
+ GST_LOG_OBJECT (pngdec, "failed setting up libpng jmpbuf");
gst_pngdec_libpng_clear (pngdec);
return FALSE;
}
}
-/* this function gets called when we activate ourselves in pull mode.
- * We can perform random access to the resource and we start a task
- * to start reading */
-static gboolean
-gst_pngdec_sink_activate_pull (GstPad * sinkpad, gboolean active)
-{
- if (active) {
- return gst_pad_start_task (sinkpad, (GstTaskFunction) gst_pngdec_task,
- sinkpad);
- } else {
- return gst_pad_stop_task (sinkpad);
- }
-}
-
/* this function is called when the pad is activated and should start
* processing data.
*
* pull based.
*/
static gboolean
-gst_pngdec_sink_activate (GstPad * sinkpad)
+gst_pngdec_sink_activate (GstPad * sinkpad, GstObject * parent)
{
- if (gst_pad_check_pull_range (sinkpad)) {
- return gst_pad_activate_pull (sinkpad, TRUE);
- } else {
- return gst_pad_activate_push (sinkpad, TRUE);
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode (query, GST_PAD_MODE_PULL);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
}
}
#define __GST_PNGDEC_H__
#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideopool.h>
+
#include <png.h>
G_BEGIN_DECLS
/* Progressive */
GstBuffer *buffer_out;
GstFlowReturn ret;
- png_uint_32 rowbytes;
/* Pull range */
gint offset;
png_infop endinfo;
gboolean setup;
+ GstVideoInfo vinfo;
gint width;
gint height;
- gint bpp;
gint color_type;
gint fps_n;
gint fps_d;
+ GstBufferPool *pool;
+
/* Chain */
gboolean framed;
GstClockTime in_timestamp;
#include <string.h>
#include <gst/gst.h>
#include "gstpngenc.h"
-#include <gst/video/video.h>
#include <zlib.h>
GST_DEBUG_CATEGORY_STATIC (pngenc_debug);
LAST_SIGNAL
};
-#define DEFAULT_SNAPSHOT TRUE
+#define DEFAULT_SNAPSHOT FALSE
/* #define DEFAULT_NEWMEDIA FALSE */
#define DEFAULT_COMPRESSION_LEVEL 6
);
static GstStaticPadTemplate pngenc_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_RGB ";"
- GST_VIDEO_CAPS_GRAY8)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBA, RGB, GRAY8 }"))
);
/* static GstElementClass *parent_class = NULL; */
-GST_BOILERPLATE (GstPngEnc, gst_pngenc, GstElement, GST_TYPE_ELEMENT);
+G_DEFINE_TYPE (GstPngEnc, gst_pngenc, GST_TYPE_ELEMENT);
static void gst_pngenc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_pngenc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-static GstFlowReturn gst_pngenc_chain (GstPad * pad, GstBuffer * data);
+static GstFlowReturn gst_pngenc_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * data);
+static gboolean gst_pngenc_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static void
user_error_fn (png_structp png_ptr, png_const_charp error_msg)
}
static void
-gst_pngenc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template
- (element_class, &pngenc_sink_template);
- gst_element_class_add_static_pad_template
- (element_class, &pngenc_src_template);
- gst_element_class_set_details_simple (element_class, "PNG image encoder",
- "Codec/Encoder/Image",
- "Encode a video frame to a .png image",
- "Jeremy SIMON <jsimon13@yahoo.fr>");
-}
-
-static void
gst_pngenc_class_init (GstPngEncClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
gobject_class = (GObjectClass *) klass;
-
- parent_class = g_type_class_peek_parent (klass);
+ element_class = (GstElementClass *) klass;
gobject_class->get_property = gst_pngenc_get_property;
gobject_class->set_property = gst_pngenc_set_property;
DEFAULT_COMPRESSION_LEVEL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template
+ (element_class, gst_static_pad_template_get (&pngenc_sink_template));
+ gst_element_class_add_pad_template
+ (element_class, gst_static_pad_template_get (&pngenc_src_template));
+ gst_element_class_set_details_simple (element_class, "PNG image encoder",
+ "Codec/Encoder/Image",
+ "Encode a video frame to a .png image",
+ "Jeremy SIMON <jsimon13@yahoo.fr>");
+
GST_DEBUG_CATEGORY_INIT (pngenc_debug, "pngenc", 0, "PNG image encoder");
}
static gboolean
-gst_pngenc_setcaps (GstPad * pad, GstCaps * caps)
+gst_pngenc_setcaps (GstPngEnc * pngenc, GstCaps * caps)
{
- GstPngEnc *pngenc;
- GstVideoFormat format;
int fps_n, fps_d;
GstCaps *pcaps;
gboolean ret;
+ GstVideoInfo info;
- pngenc = GST_PNGENC (gst_pad_get_parent (pad));
-
- ret = gst_video_format_parse_caps (caps, &format,
- &pngenc->width, &pngenc->height);
- if (G_LIKELY (ret))
- ret = gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
+ ret = gst_video_info_from_caps (&info, caps);
if (G_UNLIKELY (!ret))
goto done;
- switch (format) {
+ pngenc->info = info;
+
+ switch (GST_VIDEO_INFO_FORMAT (&info)) {
case GST_VIDEO_FORMAT_RGBA:
pngenc->png_color_type = PNG_COLOR_TYPE_RGBA;
break;
goto done;
}
+ pngenc->width = GST_VIDEO_INFO_WIDTH (&info);
+ pngenc->height = GST_VIDEO_INFO_HEIGHT (&info);
+ fps_n = GST_VIDEO_INFO_FPS_N (&info);
+ fps_d = GST_VIDEO_INFO_FPS_D (&info);
+
if (G_UNLIKELY (pngenc->width < 16 || pngenc->width > 1000000 ||
pngenc->height < 16 || pngenc->height > 1000000)) {
ret = FALSE;
goto done;
}
- pngenc->stride = gst_video_format_get_row_stride (format, 0, pngenc->width);
-
pcaps = gst_caps_new_simple ("image/png",
"width", G_TYPE_INT, pngenc->width,
"height", G_TYPE_INT, pngenc->height,
pngenc->height = 0;
}
- gst_object_unref (pngenc);
-
return ret;
}
static void
-gst_pngenc_init (GstPngEnc * pngenc, GstPngEncClass * g_class)
+gst_pngenc_init (GstPngEnc * pngenc)
{
/* sinkpad */
pngenc->sinkpad = gst_pad_new_from_static_template
(&pngenc_sink_template, "sink");
- gst_pad_set_chain_function (pngenc->sinkpad, gst_pngenc_chain);
- /* gst_pad_set_link_function (pngenc->sinkpad, gst_pngenc_sinklink); */
- /* gst_pad_set_getcaps_function (pngenc->sinkpad, gst_pngenc_sink_getcaps); */
- gst_pad_set_setcaps_function (pngenc->sinkpad, gst_pngenc_setcaps);
+ gst_pad_set_chain_function (pngenc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_pngenc_chain));
+ gst_pad_set_event_function (pngenc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_pngenc_sink_event));
gst_element_add_pad (GST_ELEMENT (pngenc), pngenc->sinkpad);
/* srcpad */
pngenc->srcpad = gst_pad_new_from_static_template
(&pngenc_src_template, "src");
- /* pngenc->srcpad = gst_pad_new ("src", GST_PAD_SRC); */
- /* gst_pad_set_getcaps_function (pngenc->srcpad, gst_pngenc_src_getcaps); */
- /* gst_pad_set_setcaps_function (pngenc->srcpad, gst_pngenc_setcaps); */
+ gst_pad_use_fixed_caps (pngenc->srcpad);
gst_element_add_pad (GST_ELEMENT (pngenc), pngenc->srcpad);
/* init settings */
user_write_data (png_structp png_ptr, png_bytep data, png_uint_32 length)
{
GstPngEnc *pngenc;
+ GstMapInfo map;
pngenc = (GstPngEnc *) png_get_io_ptr (png_ptr);
- if (pngenc->written + length >= GST_BUFFER_SIZE (pngenc->buffer_out)) {
+ gst_buffer_map (pngenc->buffer_out, &map, GST_MAP_WRITE);
+ if (pngenc->written + length >= map.size) {
+ gst_buffer_unmap (pngenc->buffer_out, &map);
GST_ERROR_OBJECT (pngenc, "output buffer bigger than the input buffer!?");
png_error (png_ptr, "output buffer bigger than the input buffer!?");
return;
}
- memcpy (GST_BUFFER_DATA (pngenc->buffer_out) + pngenc->written, data, length);
+ GST_DEBUG_OBJECT (pngenc, "writing %u bytes", (guint) length);
+
+ memcpy (map.data + pngenc->written, data, length);
+ gst_buffer_unmap (pngenc->buffer_out, &map);
pngenc->written += length;
}
static GstFlowReturn
-gst_pngenc_chain (GstPad * pad, GstBuffer * buf)
+gst_pngenc_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstPngEnc *pngenc;
gint row_index;
png_byte **row_pointers;
GstFlowReturn ret = GST_FLOW_OK;
GstBuffer *encoded_buf = NULL;
+ GstVideoFrame frame;
- pngenc = GST_PNGENC (gst_pad_get_parent (pad));
+ pngenc = GST_PNGENC (parent);
GST_DEBUG_OBJECT (pngenc, "BEGINNING");
if (G_UNLIKELY (pngenc->width <= 0 || pngenc->height <= 0)) {
ret = GST_FLOW_NOT_NEGOTIATED;
- goto done;
+ goto exit;
}
- if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < pngenc->height * pngenc->stride)) {
- gst_buffer_unref (buf);
+ if (!gst_video_frame_map (&frame, &pngenc->info, buf, GST_MAP_READ)) {
GST_ELEMENT_ERROR (pngenc, STREAM, FORMAT, (NULL),
- ("Provided input buffer is too small, caps problem?"));
+ ("Failed to map video frame, caps problem?"));
ret = GST_FLOW_ERROR;
- goto done;
+ goto exit;
}
/* initialize png struct stuff */
pngenc->png_struct_ptr = png_create_write_struct (PNG_LIBPNG_VER_STRING,
(png_voidp) NULL, user_error_fn, user_warning_fn);
if (pngenc->png_struct_ptr == NULL) {
- gst_buffer_unref (buf);
GST_ELEMENT_ERROR (pngenc, LIBRARY, INIT, (NULL),
("Failed to initialize png structure"));
ret = GST_FLOW_ERROR;
pngenc->png_info_ptr = png_create_info_struct (pngenc->png_struct_ptr);
if (!pngenc->png_info_ptr) {
- gst_buffer_unref (buf);
png_destroy_write_struct (&(pngenc->png_struct_ptr), (png_infopp) NULL);
GST_ELEMENT_ERROR (pngenc, LIBRARY, INIT, (NULL),
("Failed to initialize the png info structure"));
/* non-0 return is from a longjmp inside of libpng */
if (setjmp (png_jmpbuf (pngenc->png_struct_ptr)) != 0) {
- gst_buffer_unref (buf);
png_destroy_write_struct (&pngenc->png_struct_ptr, &pngenc->png_info_ptr);
GST_ELEMENT_ERROR (pngenc, LIBRARY, FAILED, (NULL),
("returning from longjmp"));
row_pointers = g_new (png_byte *, pngenc->height);
for (row_index = 0; row_index < pngenc->height; row_index++) {
- row_pointers[row_index] = GST_BUFFER_DATA (buf) +
- (row_index * pngenc->stride);
+ row_pointers[row_index] = GST_VIDEO_FRAME_COMP_DATA (&frame, 0) +
+ (row_index * GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0));
}
/* allocate the output buffer */
pngenc->buffer_out =
- gst_buffer_new_and_alloc (pngenc->height * pngenc->stride);
+ gst_buffer_new_and_alloc (pngenc->height * pngenc->width);
pngenc->written = 0;
png_write_info (pngenc->png_struct_ptr, pngenc->png_info_ptr);
g_free (row_pointers);
- encoded_buf = gst_buffer_create_sub (pngenc->buffer_out, 0, pngenc->written);
+ GST_DEBUG_OBJECT (pngenc, "written %d", pngenc->written);
+
+ encoded_buf =
+ gst_buffer_copy_region (pngenc->buffer_out, GST_BUFFER_COPY_MEMORY,
+ 0, pngenc->written);
png_destroy_info_struct (pngenc->png_struct_ptr, &pngenc->png_info_ptr);
png_destroy_write_struct (&pngenc->png_struct_ptr, (png_infopp) NULL);
- gst_buffer_copy_metadata (encoded_buf, buf, GST_BUFFER_COPY_TIMESTAMPS);
- gst_buffer_unref (buf);
- gst_buffer_set_caps (encoded_buf, GST_PAD_CAPS (pngenc->srcpad));
+
+ GST_BUFFER_TIMESTAMP (encoded_buf) = GST_BUFFER_TIMESTAMP (buf);
+ GST_BUFFER_DURATION (encoded_buf) = GST_BUFFER_DURATION (buf);
if ((ret = gst_pad_push (pngenc->srcpad, encoded_buf)) != GST_FLOW_OK)
goto done;
event = gst_event_new_eos ();
gst_pad_push_event (pngenc->srcpad, event);
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
}
done:
+ gst_video_frame_unmap (&frame);
+exit:
+ gst_buffer_unref (buf);
GST_DEBUG_OBJECT (pngenc, "END, ret:%d", ret);
if (pngenc->buffer_out != NULL) {
pngenc->buffer_out = NULL;
}
- gst_object_unref (pngenc);
return ret;
}
+static gboolean
+gst_pngenc_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstPngEnc *enc;
+ gboolean res;
+
+ enc = GST_PNGENC (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ res = gst_pngenc_setcaps (enc, caps);
+ gst_event_unref (event);
+ break;
+ }
+ default:
+ res = gst_pad_push_event (enc->srcpad, event);
+ break;
+ }
+ return res;
+}
static void
gst_pngenc_get_property (GObject * object,
#define __GST_PNGENC_H__
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <png.h>
#ifdef __cplusplus
png_structp png_struct_ptr;
png_infop png_info_ptr;
+ GstVideoInfo info;
gint png_color_type;
gint width;
gint height;
- gint stride;
guint compression_level;
gboolean snapshot;
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class,
- &mikmod_src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &mikmod_sink_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&mikmod_src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&mikmod_sink_factory));
gst_element_class_set_details_simple (element_class, "MikMod audio decoder",
"Codec/Decoder/Audio",
"Module decoder based on libmikmod", "Jeremy SIMON <jsimon13@yahoo.fr>");
pulsemixertrack.c \
pulseprobe.c \
pulsesink.c \
- pulseaudiosink.c \
pulsesrc.c \
pulseutil.c
GST_TYPE_PULSESRC))
return FALSE;
-#ifdef HAVE_PULSE_1_0
- if (!gst_element_register (plugin, "pulseaudiosink", GST_RANK_MARGINAL - 1,
- GST_TYPE_PULSE_AUDIO_SINK))
- return FALSE;
-#endif
-
if (!gst_element_register (plugin, "pulsemixer", GST_RANK_NONE,
GST_TYPE_PULSEMIXER))
return FALSE;
+++ /dev/null
-/*-*- Mode: C; c-basic-offset: 2 -*-*/
-
-/* GStreamer pulseaudio plugin
- *
- * Copyright (c) 2011 Intel Corporation
- * 2011 Collabora
- * 2011 Arun Raghavan <arun.raghavan@collabora.co.uk>
- * 2011 Sebastian Dröge <sebastian.droege@collabora.co.uk>
- *
- * gst-pulse is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2.1 of the
- * License, or (at your option) any later version.
- *
- * gst-pulse is distributed in the hope that it will be useful, but
- * WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with gst-pulse; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
- * USA.
- */
-
-/**
- * SECTION:element-pulseaudiosink
- * @see_also: pulsesink, pulsesrc, pulsemixer
- *
- * This element outputs audio to a
- * <ulink href="http://www.pulseaudio.org">PulseAudio sound server</ulink> via
- * the @pulsesink element. It transparently takes care of passing compressed
- * format as-is if the sink supports it, decoding if necessary, and changes
- * to supported formats at runtime.
- *
- * <refsect2>
- * <title>Example pipelines</title>
- * |[
- * gst-launch -v filesrc location=sine.ogg ! oggdemux ! vorbisdec ! pulseaudiosink
- * ]| Decode and play an Ogg/Vorbis file.
- * |[
- * gst-launch -v filesrc location=test.mp3 ! mp3parse ! pulseaudiosink stream-properties="props,media.title=test"
- * ]| Play an MP3 file on a sink that supports decoding directly, plug in a
- * decoder if/when required.
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#ifdef HAVE_PULSE_1_0
-
-#include <gst/pbutils/pbutils.h>
-#include <gst/gst-i18n-plugin.h>
-#include <gst/glib-compat-private.h>
-
-#include <gst/audio/gstaudioiec61937.h>
-#include "pulsesink.h"
-
-GST_DEBUG_CATEGORY (pulseaudiosink_debug);
-#define GST_CAT_DEFAULT (pulseaudiosink_debug)
-
-#define GST_PULSE_AUDIO_SINK_LOCK(obj) G_STMT_START { \
- GST_LOG_OBJECT (obj, \
- "locking from thread %p", \
- g_thread_self ()); \
- g_mutex_lock (GST_PULSE_AUDIO_SINK_CAST(obj)->lock); \
- GST_LOG_OBJECT (obj, \
- "locked from thread %p", \
- g_thread_self ()); \
-} G_STMT_END
-
-#define GST_PULSE_AUDIO_SINK_UNLOCK(obj) G_STMT_START { \
- GST_LOG_OBJECT (obj, \
- "unlocking from thread %p", \
- g_thread_self ()); \
- g_mutex_unlock (GST_PULSE_AUDIO_SINK_CAST(obj)->lock); \
-} G_STMT_END
-
-typedef struct
-{
- GstBin parent;
- GMutex *lock;
-
- GstPad *sinkpad;
- GstPad *sink_proxypad;
- GstPadEventFunction sinkpad_old_eventfunc;
- GstPadEventFunction proxypad_old_eventfunc;
-
- GstPulseSink *psink;
- GstElement *dbin2;
-
- GstSegment segment;
-
- guint event_probe_id;
- gulong pad_added_id;
-
- gboolean format_lost;
-} GstPulseAudioSink;
-
-typedef struct
-{
- GstBinClass parent_class;
- guint n_prop_own;
- guint n_prop_total;
-} GstPulseAudioSinkClass;
-
-static void gst_pulse_audio_sink_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-static void gst_pulse_audio_sink_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_pulse_audio_sink_dispose (GObject * object);
-static gboolean gst_pulse_audio_sink_src_event (GstPad * pad, GstEvent * event);
-static gboolean gst_pulse_audio_sink_sink_event (GstPad * pad,
- GstEvent * event);
-static gboolean gst_pulse_audio_sink_sink_acceptcaps (GstPad * pad,
- GstCaps * caps);
-static gboolean gst_pulse_audio_sink_sink_setcaps (GstPad * pad,
- GstCaps * caps);
-static GstStateChangeReturn
-gst_pulse_audio_sink_change_state (GstElement * element,
- GstStateChange transition);
-
-static void
-gst_pulse_audio_sink_do_init (GType type)
-{
- GST_DEBUG_CATEGORY_INIT (pulseaudiosink_debug, "pulseaudiosink", 0,
- "Bin that wraps pulsesink for handling compressed formats");
-}
-
-GST_BOILERPLATE_FULL (GstPulseAudioSink, gst_pulse_audio_sink, GstBin,
- GST_TYPE_BIN, gst_pulse_audio_sink_do_init);
-
-static GstStaticPadTemplate sink_template =
-GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
- GST_STATIC_CAPS (PULSE_SINK_TEMPLATE_CAPS));
-
-static void
-gst_pulse_audio_sink_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class, &sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "Bin wrapping pulsesink", "Sink/Audio/Bin",
- "Correctly handles sink changes when streaming compressed formats to "
- "pulsesink", "Arun Raghavan <arun.raghavan@collabora.co.uk>");
-}
-
-static GParamSpec *
-param_spec_copy (GParamSpec * spec)
-{
- const char *name, *nick, *blurb;
- GParamFlags flags;
-
- name = g_param_spec_get_name (spec);
- nick = g_param_spec_get_nick (spec);
- blurb = g_param_spec_get_blurb (spec);
- flags = spec->flags;
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_BOOLEAN) {
- return g_param_spec_boolean (name, nick, blurb,
- G_PARAM_SPEC_BOOLEAN (spec)->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_BOXED) {
- return g_param_spec_boxed (name, nick, blurb, spec->value_type, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_CHAR) {
- GParamSpecChar *cspec = G_PARAM_SPEC_CHAR (spec);
- return g_param_spec_char (name, nick, blurb, cspec->minimum,
- cspec->maximum, cspec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_DOUBLE) {
- GParamSpecDouble *dspec = G_PARAM_SPEC_DOUBLE (spec);
- return g_param_spec_double (name, nick, blurb, dspec->minimum,
- dspec->maximum, dspec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_ENUM) {
- return g_param_spec_enum (name, nick, blurb, spec->value_type,
- G_PARAM_SPEC_ENUM (spec)->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_FLAGS) {
- return g_param_spec_flags (name, nick, blurb, spec->value_type,
- G_PARAM_SPEC_ENUM (spec)->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_FLOAT) {
- GParamSpecFloat *fspec = G_PARAM_SPEC_FLOAT (spec);
- return g_param_spec_double (name, nick, blurb, fspec->minimum,
- fspec->maximum, fspec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_GTYPE) {
- return g_param_spec_gtype (name, nick, blurb,
- G_PARAM_SPEC_GTYPE (spec)->is_a_type, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_INT) {
- GParamSpecInt *ispec = G_PARAM_SPEC_INT (spec);
- return g_param_spec_int (name, nick, blurb, ispec->minimum,
- ispec->maximum, ispec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_INT64) {
- GParamSpecInt64 *ispec = G_PARAM_SPEC_INT64 (spec);
- return g_param_spec_int64 (name, nick, blurb, ispec->minimum,
- ispec->maximum, ispec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_LONG) {
- GParamSpecLong *lspec = G_PARAM_SPEC_LONG (spec);
- return g_param_spec_long (name, nick, blurb, lspec->minimum,
- lspec->maximum, lspec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_OBJECT) {
- return g_param_spec_object (name, nick, blurb, spec->value_type, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_PARAM) {
- return g_param_spec_param (name, nick, blurb, spec->value_type, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_POINTER) {
- return g_param_spec_pointer (name, nick, blurb, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_STRING) {
- return g_param_spec_string (name, nick, blurb,
- G_PARAM_SPEC_STRING (spec)->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_UCHAR) {
- GParamSpecUChar *cspec = G_PARAM_SPEC_UCHAR (spec);
- return g_param_spec_uchar (name, nick, blurb, cspec->minimum,
- cspec->maximum, cspec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_UINT) {
- GParamSpecUInt *ispec = G_PARAM_SPEC_UINT (spec);
- return g_param_spec_uint (name, nick, blurb, ispec->minimum,
- ispec->maximum, ispec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_UINT64) {
- GParamSpecUInt64 *ispec = G_PARAM_SPEC_UINT64 (spec);
- return g_param_spec_uint64 (name, nick, blurb, ispec->minimum,
- ispec->maximum, ispec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_ULONG) {
- GParamSpecULong *lspec = G_PARAM_SPEC_ULONG (spec);
- return g_param_spec_ulong (name, nick, blurb, lspec->minimum,
- lspec->maximum, lspec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_UNICHAR) {
- return g_param_spec_unichar (name, nick, blurb,
- G_PARAM_SPEC_UNICHAR (spec)->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == G_TYPE_PARAM_VARIANT) {
- GParamSpecVariant *vspec = G_PARAM_SPEC_VARIANT (spec);
- return g_param_spec_variant (name, nick, blurb, vspec->type,
- vspec->default_value, flags);
- }
-
- if (G_PARAM_SPEC_TYPE (spec) == GST_TYPE_PARAM_MINI_OBJECT) {
- return gst_param_spec_mini_object (name, nick, blurb, spec->value_type,
- flags);
- }
-
- g_warning ("Unknown param type %ld for '%s'",
- (long) G_PARAM_SPEC_TYPE (spec), name);
- g_assert_not_reached ();
-}
-
-static void
-gst_pulse_audio_sink_class_init (GstPulseAudioSinkClass * klass)
-{
- GObjectClass *gobject_class = (GObjectClass *) klass;
- GstElementClass *element_class = (GstElementClass *) klass;
- GstPulseSinkClass *psink_class =
- GST_PULSESINK_CLASS (g_type_class_ref (GST_TYPE_PULSESINK));
- GParamSpec **specs;
- guint n, i, j;
-
- gobject_class->get_property = gst_pulse_audio_sink_get_property;
- gobject_class->set_property = gst_pulse_audio_sink_set_property;
- gobject_class->dispose = gst_pulse_audio_sink_dispose;
- element_class->change_state =
- GST_DEBUG_FUNCPTR (gst_pulse_audio_sink_change_state);
-
- /* Find out how many properties we already have */
- specs = g_object_class_list_properties (gobject_class, &klass->n_prop_own);
- g_free (specs);
-
- /* Proxy pulsesink's properties */
- specs = g_object_class_list_properties (G_OBJECT_CLASS (psink_class), &n);
- for (i = 0, j = klass->n_prop_own; i < n; i++) {
- if (g_object_class_find_property (gobject_class,
- g_param_spec_get_name (specs[i]))) {
- /* We already inherited this property from a parent, skip */
- j--;
- } else {
- g_object_class_install_property (gobject_class, i + j + 1,
- param_spec_copy (specs[i]));
- }
- }
-
- klass->n_prop_total = i + j;
-
- g_free (specs);
- g_type_class_unref (psink_class);
-}
-
-static GstPad *
-get_proxypad (GstPad * sinkpad)
-{
- GstIterator *iter = NULL;
- GstPad *proxypad = NULL;
-
- iter = gst_pad_iterate_internal_links (sinkpad);
- if (iter) {
- if (gst_iterator_next (iter, (gpointer) & proxypad) != GST_ITERATOR_OK)
- proxypad = NULL;
- gst_iterator_free (iter);
- }
-
- return proxypad;
-}
-
-static void
-post_missing_element_message (GstPulseAudioSink * pbin, const gchar * name)
-{
- GstMessage *msg;
-
- msg = gst_missing_element_message_new (GST_ELEMENT_CAST (pbin), name);
- gst_element_post_message (GST_ELEMENT_CAST (pbin), msg);
-}
-
-static void
-notify_cb (GObject * selector, GParamSpec * pspec, GstPulseAudioSink * pbin)
-{
- g_object_notify (G_OBJECT (pbin), g_param_spec_get_name (pspec));
-}
-
-static void
-gst_pulse_audio_sink_init (GstPulseAudioSink * pbin,
- GstPulseAudioSinkClass * klass)
-{
- GstPadTemplate *template;
- GstPad *pad = NULL;
- GParamSpec **specs;
- GString *prop;
- guint i;
-
- pbin->lock = g_mutex_new ();
-
- gst_segment_init (&pbin->segment, GST_FORMAT_UNDEFINED);
-
- pbin->psink = GST_PULSESINK (gst_element_factory_make ("pulsesink",
- "pulseaudiosink-sink"));
- g_assert (pbin->psink != NULL);
-
- if (!gst_bin_add (GST_BIN (pbin), GST_ELEMENT (pbin->psink))) {
- GST_ERROR_OBJECT (pbin, "Failed to add pulsesink to bin");
- goto error;
- }
-
- pad = gst_element_get_static_pad (GST_ELEMENT (pbin->psink), "sink");
- template = gst_static_pad_template_get (&sink_template);
- pbin->sinkpad = gst_ghost_pad_new_from_template ("sink", pad, template);
- gst_object_unref (template);
-
- pbin->sinkpad_old_eventfunc = GST_PAD_EVENTFUNC (pbin->sinkpad);
- gst_pad_set_event_function (pbin->sinkpad,
- GST_DEBUG_FUNCPTR (gst_pulse_audio_sink_sink_event));
- gst_pad_set_setcaps_function (pbin->sinkpad,
- GST_DEBUG_FUNCPTR (gst_pulse_audio_sink_sink_setcaps));
- gst_pad_set_acceptcaps_function (pbin->sinkpad,
- GST_DEBUG_FUNCPTR (gst_pulse_audio_sink_sink_acceptcaps));
-
- gst_element_add_pad (GST_ELEMENT (pbin), pbin->sinkpad);
-
- if (!(pbin->sink_proxypad = get_proxypad (pbin->sinkpad)))
- GST_ERROR_OBJECT (pbin, "Failed to get proxypad of srcpad");
- else {
- pbin->proxypad_old_eventfunc = GST_PAD_EVENTFUNC (pbin->sink_proxypad);
- gst_pad_set_event_function (pbin->sink_proxypad,
- GST_DEBUG_FUNCPTR (gst_pulse_audio_sink_src_event));
- }
-
- /* Now proxy all the notify::* signals */
- specs = g_object_class_list_properties (G_OBJECT_CLASS (klass), &i);
- prop = g_string_sized_new (30);
-
- for (i--; i >= klass->n_prop_own; i--) {
- g_string_printf (prop, "notify::%s", g_param_spec_get_name (specs[i]));
- g_signal_connect (pbin->psink, prop->str, G_CALLBACK (notify_cb), pbin);
- }
-
- g_string_free (prop, TRUE);
- g_free (specs);
-
- pbin->format_lost = FALSE;
-
-out:
- if (pad)
- gst_object_unref (pad);
-
- return;
-
-error:
- if (pbin->psink)
- gst_object_unref (pbin->psink);
- goto out;
-}
-
-static void
-gst_pulse_audio_sink_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstPulseAudioSink *pbin = GST_PULSE_AUDIO_SINK (object);
- GstPulseAudioSinkClass *klass =
- GST_PULSE_AUDIO_SINK_CLASS (G_OBJECT_GET_CLASS (object));
-
- g_return_if_fail (prop_id <= klass->n_prop_total);
-
- g_object_set_property (G_OBJECT (pbin->psink), g_param_spec_get_name (pspec),
- value);
-}
-
-static void
-gst_pulse_audio_sink_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec)
-{
- GstPulseAudioSink *pbin = GST_PULSE_AUDIO_SINK (object);
- GstPulseAudioSinkClass *klass =
- GST_PULSE_AUDIO_SINK_CLASS (G_OBJECT_GET_CLASS (object));
-
- g_return_if_fail (prop_id <= klass->n_prop_total);
-
- g_object_get_property (G_OBJECT (pbin->psink), g_param_spec_get_name (pspec),
- value);
-}
-
-static void
-gst_pulse_audio_sink_free_dbin2 (GstPulseAudioSink * pbin)
-{
- g_signal_handler_disconnect (pbin->dbin2, pbin->pad_added_id);
- gst_element_set_state (pbin->dbin2, GST_STATE_NULL);
-
- gst_bin_remove (GST_BIN (pbin), pbin->dbin2);
-
- pbin->dbin2 = NULL;
-}
-
-static void
-gst_pulse_audio_sink_dispose (GObject * object)
-{
- GstPulseAudioSink *pbin = GST_PULSE_AUDIO_SINK (object);
-
- if (pbin->lock) {
- g_mutex_free (pbin->lock);
- pbin->lock = NULL;
- }
-
- if (pbin->sink_proxypad) {
- gst_object_unref (pbin->sink_proxypad);
- pbin->sink_proxypad = NULL;
- }
-
- if (pbin->dbin2) {
- g_signal_handler_disconnect (pbin->dbin2, pbin->pad_added_id);
- pbin->dbin2 = NULL;
- }
-
- pbin->sinkpad = NULL;
- pbin->psink = NULL;
-
- G_OBJECT_CLASS (parent_class)->dispose (object);
-}
-
-static gboolean
-gst_pulse_audio_sink_update_sinkpad (GstPulseAudioSink * pbin, GstPad * sinkpad)
-{
- gboolean ret;
-
- ret = gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (pbin->sinkpad), sinkpad);
-
- if (!ret)
- GST_WARNING_OBJECT (pbin, "Could not update ghostpad target");
-
- return ret;
-}
-
-static void
-distribute_running_time (GstElement * element, const GstSegment * segment)
-{
- GstEvent *event;
- GstPad *pad;
-
- pad = gst_element_get_static_pad (element, "sink");
-
- /* FIXME: Some decoders collect newsegments and send them out at once, making
- * them lose accumulator events (and thus making dbin2_event_probe() hard to
- * do right if we're sending these as well. We can get away with not sending
- * these at the moment, but this should be fixed! */
-#if 0
- if (segment->accum) {
- event = gst_event_new_new_segment_full (FALSE, segment->rate,
- segment->applied_rate, segment->format, 0, segment->accum, 0);
- gst_pad_send_event (pad, event);
- }
-#endif
-
- event = gst_event_new_new_segment_full (FALSE, segment->rate,
- segment->applied_rate, segment->format,
- segment->start, segment->stop, segment->time);
- gst_pad_send_event (pad, event);
-
- gst_object_unref (pad);
-}
-
-static gboolean
-dbin2_event_probe (GstPad * pad, GstMiniObject * obj, gpointer data)
-{
- GstPulseAudioSink *pbin = GST_PULSE_AUDIO_SINK (data);
- GstEvent *event = GST_EVENT (obj);
-
- if (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT) {
- GST_DEBUG_OBJECT (pbin, "Got newsegment - dropping");
- gst_pad_remove_event_probe (pad, pbin->event_probe_id);
- return FALSE;
- }
-
- return TRUE;
-}
-
-static void
-pad_added_cb (GstElement * dbin2, GstPad * pad, gpointer * data)
-{
- GstPulseAudioSink *pbin;
- GstPad *sinkpad = NULL;
-
- pbin = GST_PULSE_AUDIO_SINK (data);
- sinkpad = gst_element_get_static_pad (GST_ELEMENT (pbin->psink), "sink");
-
- GST_PULSE_AUDIO_SINK_LOCK (pbin);
- if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
- GST_ERROR_OBJECT (pbin, "Failed to link decodebin2 to pulsesink");
- else
- GST_DEBUG_OBJECT (pbin, "Linked new pad to pulsesink");
- GST_PULSE_AUDIO_SINK_UNLOCK (pbin);
-
- gst_object_unref (sinkpad);
-}
-
-/* Called with pbin lock held */
-static void
-gst_pulse_audio_sink_add_dbin2 (GstPulseAudioSink * pbin)
-{
- GstPad *sinkpad = NULL;
-
- g_assert (pbin->dbin2 == NULL);
-
- pbin->dbin2 = gst_element_factory_make ("decodebin2", "pulseaudiosink-dbin2");
-
- if (!pbin->dbin2) {
- post_missing_element_message (pbin, "decodebin2");
- GST_ELEMENT_WARNING (pbin, CORE, MISSING_PLUGIN,
- (_("Missing element '%s' - check your GStreamer installation."),
- "decodebin2"), ("audio playback might fail"));
- goto out;
- }
-
- if (!gst_bin_add (GST_BIN (pbin), pbin->dbin2)) {
- GST_ERROR_OBJECT (pbin, "Failed to add decodebin2 to bin");
- goto out;
- }
-
- pbin->pad_added_id = g_signal_connect (pbin->dbin2, "pad-added",
- G_CALLBACK (pad_added_cb), pbin);
-
- if (!gst_element_sync_state_with_parent (pbin->dbin2)) {
- GST_ERROR_OBJECT (pbin, "Failed to set decodebin2 to parent state");
- goto out;
- }
-
- /* Trap the newsegment events that we feed the decodebin and discard them */
- sinkpad = gst_element_get_static_pad (GST_ELEMENT (pbin->psink), "sink");
- pbin->event_probe_id = gst_pad_add_event_probe_full (sinkpad,
- G_CALLBACK (dbin2_event_probe), gst_object_ref (pbin),
- (GDestroyNotify) gst_object_unref);
- gst_object_unref (sinkpad);
- sinkpad = NULL;
-
- GST_DEBUG_OBJECT (pbin, "Distributing running time to decodebin");
- distribute_running_time (pbin->dbin2, &pbin->segment);
-
- sinkpad = gst_element_get_static_pad (pbin->dbin2, "sink");
-
- gst_pulse_audio_sink_update_sinkpad (pbin, sinkpad);
-
-out:
- if (sinkpad)
- gst_object_unref (sinkpad);
-}
-
-static void
-update_eac3_alignment (GstPulseAudioSink * pbin)
-{
- GstCaps *caps = gst_pad_peer_get_caps_reffed (pbin->sinkpad);
- GstStructure *st;
-
- if (!caps)
- return;
-
- st = gst_caps_get_structure (caps, 0);
-
- if (g_str_equal (gst_structure_get_name (st), "audio/x-eac3")) {
- GstStructure *event_st = gst_structure_new ("ac3parse-set-alignment",
- "alignment", G_TYPE_STRING, pbin->dbin2 ? "frame" : "iec61937", NULL);
-
- if (!gst_pad_push_event (pbin->sinkpad,
- gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, event_st)))
- GST_WARNING_OBJECT (pbin->sinkpad, "Could not update alignment");
- }
-
- gst_caps_unref (caps);
-}
-
-static void
-proxypad_blocked_cb (GstPad * pad, gboolean blocked, gpointer data)
-{
- GstPulseAudioSink *pbin = GST_PULSE_AUDIO_SINK (data);
- GstCaps *caps;
- GstPad *sinkpad = NULL;
-
- if (!blocked) {
- /* Unblocked, don't need to do anything */
- GST_DEBUG_OBJECT (pbin, "unblocked");
- return;
- }
-
- GST_DEBUG_OBJECT (pbin, "blocked");
-
- GST_PULSE_AUDIO_SINK_LOCK (pbin);
-
- if (!pbin->format_lost) {
- sinkpad = gst_element_get_static_pad (GST_ELEMENT (pbin->psink), "sink");
-
- if (GST_PAD_CAPS (pbin->sinkpad)) {
- /* See if we already got caps on our sinkpad */
- caps = gst_caps_ref (GST_PAD_CAPS (pbin->sinkpad));
- } else {
- /* We haven't, so get caps from upstream */
- caps = gst_pad_get_caps_reffed (pad);
- }
-
- if (gst_pad_accept_caps (sinkpad, caps)) {
- if (pbin->dbin2) {
- GST_DEBUG_OBJECT (pbin, "Removing decodebin");
- gst_pulse_audio_sink_free_dbin2 (pbin);
- gst_pulse_audio_sink_update_sinkpad (pbin, sinkpad);
- } else
- GST_DEBUG_OBJECT (pbin, "Doing nothing");
-
- gst_caps_unref (caps);
- gst_object_unref (sinkpad);
- goto done;
- }
- /* pulsesink doesn't accept the incoming caps, so add a decodebin
- * (potentially after removing the existing once, since decodebin2 can't
- * renegotiate). */
- } else {
- /* Format lost, proceed to try plugging a decodebin */
- pbin->format_lost = FALSE;
- }
-
- if (pbin->dbin2 != NULL) {
- /* decodebin2 doesn't support reconfiguration, so throw this one away and
- * create a new one. */
- gst_pulse_audio_sink_free_dbin2 (pbin);
- }
-
- GST_DEBUG_OBJECT (pbin, "Adding decodebin");
- gst_pulse_audio_sink_add_dbin2 (pbin);
-
-done:
- update_eac3_alignment (pbin);
-
- gst_pad_set_blocked_async_full (pad, FALSE, proxypad_blocked_cb,
- gst_object_ref (pbin), (GDestroyNotify) gst_object_unref);
-
- GST_PULSE_AUDIO_SINK_UNLOCK (pbin);
-}
-
-static gboolean
-gst_pulse_audio_sink_src_event (GstPad * pad, GstEvent * event)
-{
- GstPulseAudioSink *pbin = NULL;
- GstPad *ghostpad = NULL;
- gboolean ret = FALSE;
-
- ghostpad = GST_PAD_CAST (gst_pad_get_parent (pad));
- if (G_UNLIKELY (!ghostpad)) {
- GST_WARNING_OBJECT (pad, "Could not get ghostpad");
- goto out;
- }
-
- pbin = GST_PULSE_AUDIO_SINK (gst_pad_get_parent (ghostpad));
- if (G_UNLIKELY (!pbin)) {
- GST_WARNING_OBJECT (pad, "Could not get pulseaudiosink");
- goto out;
- }
-
- if (G_UNLIKELY (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_UPSTREAM) &&
- (gst_event_has_name (event, "pulse-format-lost") ||
- gst_event_has_name (event, "pulse-sink-changed"))) {
- g_return_val_if_fail (pad->mode != GST_ACTIVATE_PULL, FALSE);
-
- GST_PULSE_AUDIO_SINK_LOCK (pbin);
- if (gst_event_has_name (event, "pulse-format-lost"))
- pbin->format_lost = TRUE;
-
- if (!gst_pad_is_blocked (pad))
- gst_pad_set_blocked_async_full (pad, TRUE, proxypad_blocked_cb,
- gst_object_ref (pbin), (GDestroyNotify) gst_object_unref);
- GST_PULSE_AUDIO_SINK_UNLOCK (pbin);
-
- ret = TRUE;
- } else if (pbin->proxypad_old_eventfunc) {
- ret = pbin->proxypad_old_eventfunc (pad, event);
- event = NULL;
- }
-
-out:
- if (ghostpad)
- gst_object_unref (ghostpad);
- if (pbin)
- gst_object_unref (pbin);
- if (event)
- gst_event_unref (event);
-
- return ret;
-}
-
-static gboolean
-gst_pulse_audio_sink_sink_event (GstPad * pad, GstEvent * event)
-{
- GstPulseAudioSink *pbin = GST_PULSE_AUDIO_SINK (gst_pad_get_parent (pad));
- gboolean ret;
-
- ret = pbin->sinkpad_old_eventfunc (pad, gst_event_ref (event));
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
- {
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time;
- gboolean update;
-
- GST_PULSE_AUDIO_SINK_LOCK (pbin);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
-
- GST_DEBUG_OBJECT (pbin,
- "newsegment: update %d, rate %g, arate %g, start %" GST_TIME_FORMAT
- ", stop %" GST_TIME_FORMAT ", time %" GST_TIME_FORMAT,
- update, rate, arate, GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
- GST_TIME_ARGS (time));
-
- if (format == GST_FORMAT_TIME) {
- /* Store the values for feeding to sub-elements */
- gst_segment_set_newsegment_full (&pbin->segment, update,
- rate, arate, format, start, stop, time);
- } else {
- GST_WARNING_OBJECT (pbin, "Got a non-TIME format segment");
- gst_segment_init (&pbin->segment, GST_FORMAT_TIME);
- }
- GST_PULSE_AUDIO_SINK_UNLOCK (pbin);
-
- break;
- }
-
- case GST_EVENT_FLUSH_STOP:
- GST_PULSE_AUDIO_SINK_LOCK (pbin);
- gst_segment_init (&pbin->segment, GST_FORMAT_UNDEFINED);
- GST_PULSE_AUDIO_SINK_UNLOCK (pbin);
- break;
-
- default:
- break;
- }
-
- gst_object_unref (pbin);
- gst_event_unref (event);
-
- return ret;
-}
-
-/* The bin's acceptcaps should be exactly equivalent to a pulsesink that is
- * connected to a sink that supports all the formats in template caps. This
- * means that upstream will have to have everything possibly upto a parser
- * plugged and we plugin a decoder whenever required. */
-static gboolean
-gst_pulse_audio_sink_sink_acceptcaps (GstPad * pad, GstCaps * caps)
-{
- GstPulseAudioSink *pbin = GST_PULSE_AUDIO_SINK (gst_pad_get_parent (pad));
- GstRingBufferSpec spec = { 0 };
- const GstStructure *st;
- GstCaps *pad_caps = NULL;
- gboolean ret = FALSE;
-
- pad_caps = gst_pad_get_caps_reffed (pad);
- if (!pad_caps || !gst_caps_can_intersect (pad_caps, caps))
- goto out;
-
- /* If we've not got fixed caps, creating a stream might fail, so let's just
- * return from here with default acceptcaps behaviour */
- if (!gst_caps_is_fixed (caps))
- goto out;
-
- spec.latency_time = GST_BASE_AUDIO_SINK (pbin->psink)->latency_time;
- if (!gst_ring_buffer_parse_caps (&spec, caps))
- goto out;
-
- /* Make sure non-raw input is framed (one frame per buffer) and can be
- * payloaded */
- st = gst_caps_get_structure (caps, 0);
-
- if (!g_str_has_prefix (gst_structure_get_name (st), "audio/x-raw")) {
- gboolean framed = FALSE, parsed = FALSE;
-
- gst_structure_get_boolean (st, "framed", &framed);
- gst_structure_get_boolean (st, "parsed", &parsed);
- if ((!framed && !parsed) || gst_audio_iec61937_frame_size (&spec) <= 0)
- goto out;
- }
-
- ret = TRUE;
-
-out:
- if (pad_caps)
- gst_caps_unref (pad_caps);
-
- gst_object_unref (pbin);
-
- return ret;
-}
-
-static gboolean
-gst_pulse_audio_sink_sink_setcaps (GstPad * pad, GstCaps * caps)
-{
- GstPulseAudioSink *pbin = GST_PULSE_AUDIO_SINK (gst_pad_get_parent (pad));
- gboolean ret = TRUE;
-
- GST_PULSE_AUDIO_SINK_LOCK (pbin);
-
- if (!gst_pad_is_blocked (pbin->sinkpad))
- gst_pad_set_blocked_async_full (pbin->sink_proxypad, TRUE,
- proxypad_blocked_cb, gst_object_ref (pbin),
- (GDestroyNotify) gst_object_unref);
-
- GST_PULSE_AUDIO_SINK_UNLOCK (pbin);
-
- gst_object_unref (pbin);
-
- return ret;
-}
-
-static GstStateChangeReturn
-gst_pulse_audio_sink_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstPulseAudioSink *pbin = GST_PULSE_AUDIO_SINK (element);
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
-
- /* Nothing to do for upward transitions */
- switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- GST_PULSE_AUDIO_SINK_LOCK (pbin);
- if (gst_pad_is_blocked (pbin->sinkpad)) {
- gst_pad_set_blocked_async_full (pbin->sink_proxypad, FALSE,
- proxypad_blocked_cb, gst_object_ref (pbin),
- (GDestroyNotify) gst_object_unref);
- }
- GST_PULSE_AUDIO_SINK_UNLOCK (pbin);
- break;
-
- default:
- break;
- }
-
- ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
- if (ret != GST_STATE_CHANGE_SUCCESS) {
- GST_DEBUG_OBJECT (pbin, "Base class returned %d on state change", ret);
- goto out;
- }
-
- switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- GST_PULSE_AUDIO_SINK_LOCK (pbin);
- gst_segment_init (&pbin->segment, GST_FORMAT_UNDEFINED);
-
- if (pbin->dbin2) {
- GstPad *pad = gst_element_get_static_pad (GST_ELEMENT (pbin->psink),
- "sink");
-
- gst_pulse_audio_sink_free_dbin2 (pbin);
- gst_pulse_audio_sink_update_sinkpad (pbin, pad);
-
- gst_object_unref (pad);
-
- }
- GST_PULSE_AUDIO_SINK_UNLOCK (pbin);
-
- break;
-
- default:
- break;
- }
-
-out:
- return ret;
-}
-
-#endif /* HAVE_PULSE_1_0 */
static GstStateChangeReturn gst_pulsemixer_change_state (GstElement * element,
GstStateChange transition);
-static void gst_pulsemixer_init_interfaces (GType type);
-
GST_IMPLEMENT_PULSEMIXER_CTRL_METHODS (GstPulseMixer, gst_pulsemixer);
-GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseMixer, gst_pulsemixer);
-GST_BOILERPLATE_FULL (GstPulseMixer, gst_pulsemixer, GstElement,
- GST_TYPE_ELEMENT, gst_pulsemixer_init_interfaces);
-
-static gboolean
-gst_pulsemixer_interface_supported (GstImplementsInterface
- * iface, GType interface_type)
-{
- GstPulseMixer *this = GST_PULSEMIXER (iface);
-
- if (interface_type == GST_TYPE_MIXER && this->mixer)
- return TRUE;
-
- if (interface_type == GST_TYPE_PROPERTY_PROBE && this->probe)
- return TRUE;
-
- return FALSE;
-}
-static void
-gst_pulsemixer_implements_interface_init (GstImplementsInterfaceClass * klass)
-{
- klass->supported = gst_pulsemixer_interface_supported;
-}
-
-static void
-gst_pulsemixer_init_interfaces (GType type)
-{
- static const GInterfaceInfo implements_iface_info = {
- (GInterfaceInitFunc) gst_pulsemixer_implements_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo mixer_iface_info = {
- (GInterfaceInitFunc) gst_pulsemixer_mixer_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo probe_iface_info = {
- (GInterfaceInitFunc) gst_pulsemixer_property_probe_interface_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (type, GST_TYPE_IMPLEMENTS_INTERFACE,
- &implements_iface_info);
- g_type_add_interface_static (type, GST_TYPE_MIXER, &mixer_iface_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &probe_iface_info);
-}
-static void
-gst_pulsemixer_base_init (gpointer g_class)
-{
- gst_element_class_set_details_simple (GST_ELEMENT_CLASS (g_class),
- "PulseAudio Mixer",
- "Generic/Audio",
- "Control sound input and output levels for PulseAudio",
- "Lennart Poettering");
-}
+#define gst_pulsemixer_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPulseMixer, gst_pulsemixer, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_MIXER,
+ gst_pulsemixer_mixer_interface_init));
static void
gst_pulsemixer_class_init (GstPulseMixerClass * g_class)
g_param_spec_string ("device-name", "Device name",
"Human-readable name of the sound device", NULL,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_details_simple (GST_ELEMENT_CLASS (g_class),
+ "PulseAudio Mixer",
+ "Generic/Audio",
+ "Control sound input and output levels for PulseAudio",
+ "Lennart Poettering");
}
static void
-gst_pulsemixer_init (GstPulseMixer * this, GstPulseMixerClass * g_class)
+gst_pulsemixer_init (GstPulseMixer * this)
{
this->mixer = NULL;
this->server = NULL;
#define __GST_PULSEMIXERCTRL_H__
#include <gst/gst.h>
-#include <gst/interfaces/mixer.h>
+#include <gst/audio/mixer.h>
#include <pulse/pulseaudio.h>
#include <pulse/thread-mainloop.h>
return gst_pulsemixer_ctrl_get_mixer_flags (this->mixer); \
} \
static void \
-interface_as_function ## _mixer_interface_init (GstMixerClass * klass) \
+interface_as_function ## _mixer_interface_init (GstMixerInterface * iface) \
{ \
- GST_MIXER_TYPE (klass) = GST_MIXER_HARDWARE; \
+ GST_MIXER_TYPE (iface) = GST_MIXER_HARDWARE; \
\
- klass->list_tracks = interface_as_function ## _list_tracks; \
- klass->set_volume = interface_as_function ## _set_volume; \
- klass->get_volume = interface_as_function ## _get_volume; \
- klass->set_mute = interface_as_function ## _set_mute; \
- klass->set_record = interface_as_function ## _set_record; \
- klass->get_mixer_flags = interface_as_function ## _get_mixer_flags; \
+ iface->list_tracks = interface_as_function ## _list_tracks; \
+ iface->set_volume = interface_as_function ## _set_volume; \
+ iface->get_volume = interface_as_function ## _get_volume; \
+ iface->set_mute = interface_as_function ## _set_mute; \
+ iface->set_record = interface_as_function ## _set_record; \
+ iface->get_mixer_flags = interface_as_function ## _get_mixer_flags; \
}
G_END_DECLS
}
}
+#if 0
GValueArray *
gst_pulseprobe_get_values (GstPulseProbe * c, guint prop_id,
const GParamSpec * pspec)
return array;
}
+#endif
void
gst_pulseprobe_set_server (GstPulseProbe * c, const gchar * server)
G_BEGIN_DECLS
-#include <gst/interfaces/propertyprobe.h>
#include <pulse/pulseaudio.h>
#include <pulse/thread-mainloop.h>
const GParamSpec * pspec);
void gst_pulseprobe_probe_property (GstPulseProbe * probe, guint prop_id,
const GParamSpec * pspec);
+#if 0
GValueArray *gst_pulseprobe_get_values (GstPulseProbe * probe, guint prop_id,
const GParamSpec * pspec);
+#endif
void gst_pulseprobe_set_server (GstPulseProbe * c, const gchar * server);
#include <gst/base/gstbasesink.h>
#include <gst/gsttaglist.h>
-#include <gst/interfaces/streamvolume.h>
+#include <gst/audio/streamvolume.h>
#include <gst/gst-i18n-plugin.h>
#include <gst/audio/gstaudioiec61937.h>
PROP_DEVICE_NAME,
PROP_VOLUME,
PROP_MUTE,
- PROP_CLIENT,
+ PROP_CLIENT_NAME,
PROP_STREAM_PROPERTIES,
PROP_LAST
};
static guint mainloop_ref_ct = 0;
/* lock for access to shared resources */
-static GMutex *pa_shared_resource_mutex = NULL;
+static GMutex pa_shared_resource_mutex;
/* We keep a custom ringbuffer that is backed up by data allocated by
* pulseaudio. We must also overide the commit function to write into
* pulseaudio memory instead. */
struct _GstPulseRingBuffer
{
- GstRingBuffer object;
+ GstAudioRingBuffer object;
gchar *context_name;
gchar *stream_name;
pa_context *context;
pa_stream *stream;
-#ifdef HAVE_PULSE_1_0
pa_format_info *format;
guint channels;
gboolean is_pcm;
-#else
- pa_sample_spec sample_spec;
-#endif
void *m_data;
size_t m_towrite;
};
struct _GstPulseRingBufferClass
{
- GstRingBufferClass parent_class;
+ GstAudioRingBufferClass parent_class;
};
static GType gst_pulseringbuffer_get_type (void);
static void gst_pulseringbuffer_finalize (GObject * object);
-static GstRingBufferClass *ring_parent_class = NULL;
-
-static gboolean gst_pulseringbuffer_open_device (GstRingBuffer * buf);
-static gboolean gst_pulseringbuffer_close_device (GstRingBuffer * buf);
-static gboolean gst_pulseringbuffer_acquire (GstRingBuffer * buf,
- GstRingBufferSpec * spec);
-static gboolean gst_pulseringbuffer_release (GstRingBuffer * buf);
-static gboolean gst_pulseringbuffer_start (GstRingBuffer * buf);
-static gboolean gst_pulseringbuffer_pause (GstRingBuffer * buf);
-static gboolean gst_pulseringbuffer_stop (GstRingBuffer * buf);
-static void gst_pulseringbuffer_clear (GstRingBuffer * buf);
-static guint gst_pulseringbuffer_commit (GstRingBuffer * buf,
+static GstAudioRingBufferClass *ring_parent_class = NULL;
+
+static gboolean gst_pulseringbuffer_open_device (GstAudioRingBuffer * buf);
+static gboolean gst_pulseringbuffer_close_device (GstAudioRingBuffer * buf);
+static gboolean gst_pulseringbuffer_acquire (GstAudioRingBuffer * buf,
+ GstAudioRingBufferSpec * spec);
+static gboolean gst_pulseringbuffer_release (GstAudioRingBuffer * buf);
+static gboolean gst_pulseringbuffer_start (GstAudioRingBuffer * buf);
+static gboolean gst_pulseringbuffer_pause (GstAudioRingBuffer * buf);
+static gboolean gst_pulseringbuffer_stop (GstAudioRingBuffer * buf);
+static void gst_pulseringbuffer_clear (GstAudioRingBuffer * buf);
+static guint gst_pulseringbuffer_commit (GstAudioRingBuffer * buf,
guint64 * sample, guchar * data, gint in_samples, gint out_samples,
gint * accum);
-G_DEFINE_TYPE (GstPulseRingBuffer, gst_pulseringbuffer, GST_TYPE_RING_BUFFER);
+G_DEFINE_TYPE (GstPulseRingBuffer, gst_pulseringbuffer,
+ GST_TYPE_AUDIO_RING_BUFFER);
static void
gst_pulsesink_init_contexts (void)
{
- g_assert (pa_shared_resource_mutex == NULL);
- pa_shared_resource_mutex = g_mutex_new ();
+ g_mutex_init (&pa_shared_resource_mutex);
gst_pulse_shared_contexts = g_hash_table_new_full (g_str_hash, g_str_equal,
g_free, NULL);
}
gst_pulseringbuffer_class_init (GstPulseRingBufferClass * klass)
{
GObjectClass *gobject_class;
- GstRingBufferClass *gstringbuffer_class;
+ GstAudioRingBufferClass *gstringbuffer_class;
gobject_class = (GObjectClass *) klass;
- gstringbuffer_class = (GstRingBufferClass *) klass;
+ gstringbuffer_class = (GstAudioRingBufferClass *) klass;
ring_parent_class = g_type_class_peek_parent (klass);
pbuf->context = NULL;
pbuf->stream = NULL;
-#ifdef HAVE_PULSE_1_0
pbuf->format = NULL;
pbuf->channels = 0;
pbuf->is_pcm = FALSE;
-#else
- pa_sample_spec_init (&pbuf->sample_spec);
-#endif
pbuf->m_data = NULL;
pbuf->m_towrite = 0;
pbuf->m_offset = 0;
pbuf->m_lastoffset = 0;
}
-#ifdef HAVE_PULSE_1_0
if (pbuf->format) {
pa_format_info_free (pbuf->format);
pbuf->format = NULL;
pbuf->channels = 0;
pbuf->is_pcm = FALSE;
}
-#endif
pa_stream_disconnect (pbuf->stream);
static void
gst_pulsering_destroy_context (GstPulseRingBuffer * pbuf)
{
- g_mutex_lock (pa_shared_resource_mutex);
+ g_mutex_lock (&pa_shared_resource_mutex);
GST_DEBUG_OBJECT (pbuf, "destroying ringbuffer %p", pbuf);
g_free (pbuf->context_name);
pbuf->context_name = NULL;
}
- g_mutex_unlock (pa_shared_resource_mutex);
+ g_mutex_unlock (&pa_shared_resource_mutex);
}
static void
if (idx != pa_stream_get_index (pbuf->stream))
continue;
-#ifdef HAVE_PULSE_1_0
if (psink->device && pbuf->is_pcm &&
!g_str_equal (psink->device,
pa_stream_get_device_name (pbuf->stream))) {
GST_INFO_OBJECT (psink, "emitting sink-changed");
+ /* FIXME: send reconfigure event instead and let decodebin/playbin
+ * handle that. Also take care of ac3 alignment. See "pulse-format-lost" */
renego = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
- gst_structure_new ("pulse-sink-changed", NULL));
+ gst_structure_new_empty ("pulse-sink-changed"));
if (!gst_pad_push_event (GST_BASE_SINK (psink)->sinkpad, renego))
GST_DEBUG_OBJECT (psink, "Emitted sink-changed - nobody was listening");
}
-#endif
/* Actually this event is also triggered when other properties of
* the stream change that are unrelated to the volume. However it is
/* will be called when the device should be opened. In this case we will connect
* to the server. We should not try to open any streams in this state. */
static gboolean
-gst_pulseringbuffer_open_device (GstRingBuffer * buf)
+gst_pulseringbuffer_open_device (GstAudioRingBuffer * buf)
{
GstPulseSink *psink;
GstPulseRingBuffer *pbuf;
pa_threaded_mainloop_lock (mainloop);
- g_mutex_lock (pa_shared_resource_mutex);
+ g_mutex_lock (&pa_shared_resource_mutex);
need_unlock_shared = TRUE;
pctx = g_hash_table_lookup (gst_pulse_shared_contexts, pbuf->context_name);
pctx->ring_buffers = g_slist_prepend (pctx->ring_buffers, pbuf);
}
- g_mutex_unlock (pa_shared_resource_mutex);
+ g_mutex_unlock (&pa_shared_resource_mutex);
need_unlock_shared = FALSE;
/* context created or shared okay */
unlock_and_fail:
{
if (need_unlock_shared)
- g_mutex_unlock (pa_shared_resource_mutex);
+ g_mutex_unlock (&pa_shared_resource_mutex);
gst_pulsering_destroy_context (pbuf);
pa_threaded_mainloop_unlock (mainloop);
return FALSE;
/* close the device */
static gboolean
-gst_pulseringbuffer_close_device (GstRingBuffer * buf)
+gst_pulseringbuffer_close_device (GstAudioRingBuffer * buf)
{
GstPulseSink *psink;
GstPulseRingBuffer *pbuf;
gst_pulsering_stream_request_cb (pa_stream * s, size_t length, void *userdata)
{
GstPulseSink *psink;
- GstRingBuffer *rbuf;
+ GstAudioRingBuffer *rbuf;
GstPulseRingBuffer *pbuf;
- rbuf = GST_RING_BUFFER_CAST (userdata);
+ rbuf = GST_AUDIO_RING_BUFFER_CAST (userdata);
pbuf = GST_PULSERING_BUFFER_CAST (userdata);
psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
gst_element_post_message (GST_ELEMENT_CAST (psink),
gst_message_new_request_state (GST_OBJECT_CAST (psink),
GST_STATE_PLAYING));
-#ifdef HAVE_PULSE_1_0
} else if (!strcmp (name, PA_STREAM_EVENT_FORMAT_LOST)) {
GstEvent *renego;
g_free (psink->device);
psink->device = g_strdup (pa_proplist_gets (pl, "device"));
+ /* FIXME: send reconfigure event instead and let decodebin/playbin
+ * handle that. Also take care of ac3 alignment */
renego = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
- gst_structure_new ("pulse-format-lost", NULL));
+ gst_structure_new_empty ("pulse-format-lost"));
+
+#if 0
+ if (g_str_equal (gst_structure_get_name (st), "audio/x-eac3")) {
+ GstStructure *event_st = gst_structure_new ("ac3parse-set-alignment",
+ "alignment", G_TYPE_STRING, pbin->dbin ? "frame" : "iec61937", NULL);
+
+ if (!gst_pad_push_event (pbin->sinkpad,
+ gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, event_st)))
+ GST_WARNING_OBJECT (pbin->sinkpad, "Could not update alignment");
+ }
+#endif
if (!gst_pad_push_event (GST_BASE_SINK (psink)->sinkpad, renego)) {
/* Nobody handled the format change - emit an error */
GST_ELEMENT_ERROR (psink, STREAM, FORMAT, ("Sink format changed"),
("Sink format changed"));
}
-#endif
} else {
GST_DEBUG_OBJECT (psink, "got unknown event %s", name);
}
/* This method should create a new stream of the given @spec. No playback should
* start yet so we start in the corked state. */
static gboolean
-gst_pulseringbuffer_acquire (GstRingBuffer * buf, GstRingBufferSpec * spec)
+gst_pulseringbuffer_acquire (GstAudioRingBuffer * buf,
+ GstAudioRingBufferSpec * spec)
{
GstPulseSink *psink;
GstPulseRingBuffer *pbuf;
const pa_buffer_attr *actual;
pa_channel_map channel_map;
pa_operation *o = NULL;
-#ifdef HAVE_PULSE_0_9_20
pa_cvolume v;
-#endif
pa_cvolume *pv = NULL;
pa_stream_flags_t flags;
const gchar *name;
GstAudioClock *clock;
-#ifdef HAVE_PULSE_1_0
pa_format_info *formats[1];
#ifndef GST_DISABLE_GST_DEBUG
gchar print_buf[PA_FORMAT_INFO_SNPRINT_MAX];
#endif
-#endif
psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (buf));
pbuf = GST_PULSERING_BUFFER_CAST (buf);
GST_LOG_OBJECT (psink, "creating sample spec");
/* convert the gstreamer sample spec to the pulseaudio format */
-#ifdef HAVE_PULSE_1_0
if (!gst_pulse_fill_format_info (spec, &pbuf->format, &pbuf->channels))
goto invalid_spec;
pbuf->is_pcm = pa_format_info_is_pcm (pbuf->format);
-#else
- if (!gst_pulse_fill_sample_spec (spec, &pbuf->sample_spec))
- goto invalid_spec;
-#endif
pa_threaded_mainloop_lock (mainloop);
pa_operation_unref (o);
/* initialize the channel map */
-#ifdef HAVE_PULSE_1_0
if (pbuf->is_pcm && gst_pulse_gst_to_channel_map (&channel_map, spec))
pa_format_info_set_channel_map (pbuf->format, &channel_map);
-#else
- gst_pulse_gst_to_channel_map (&channel_map, spec);
-#endif
/* find a good name for the stream */
if (psink->stream_name)
name = "Playback Stream";
/* create a stream */
-#ifdef HAVE_PULSE_1_0
formats[0] = pbuf->format;
if (!(pbuf->stream = pa_stream_new_extended (pbuf->context, name, formats, 1,
psink->proplist)))
goto stream_failed;
-#else
- GST_LOG_OBJECT (psink, "creating stream with name %s", name);
- if (!(pbuf->stream = pa_stream_new_with_proplist (pbuf->context, name,
- &pbuf->sample_spec, &channel_map, psink->proplist)))
- goto stream_failed;
-#endif
/* install essential callbacks */
pa_stream_set_state_callback (pbuf->stream,
GST_INFO_OBJECT (psink, "prebuf: %d", wanted.prebuf);
GST_INFO_OBJECT (psink, "minreq: %d", wanted.minreq);
-#ifdef HAVE_PULSE_0_9_20
/* configure volume when we changed it, else we leave the default */
if (psink->volume_set) {
GST_LOG_OBJECT (psink, "have volume of %f", psink->volume);
pv = &v;
-#ifdef HAVE_PULSE_1_0
if (pbuf->is_pcm)
gst_pulse_cvolume_from_linear (pv, pbuf->channels, psink->volume);
else {
GST_DEBUG_OBJECT (psink, "passthrough stream, not setting volume");
pv = NULL;
}
-#else
- gst_pulse_cvolume_from_linear (pv, pbuf->sample_spec.channels,
- psink->volume);
-#endif
} else {
pv = NULL;
}
-#endif
/* construct the flags */
flags = PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE |
goto connect_failed;
/* our clock will now start from 0 again */
- clock = GST_AUDIO_CLOCK (GST_BASE_AUDIO_SINK (psink)->provided_clock);
+ clock = GST_AUDIO_CLOCK (GST_AUDIO_BASE_SINK (psink)->provided_clock);
gst_audio_clock_reset (clock, 0);
if (!gst_pulsering_wait_for_stream_ready (psink, pbuf->stream))
goto connect_failed;
-#ifdef HAVE_PULSE_1_0
g_free (psink->device);
psink->device = g_strdup (pa_stream_get_device_name (pbuf->stream));
pa_stream_get_format_info (pbuf->stream));
GST_INFO_OBJECT (psink, "negotiated to: %s", print_buf);
#endif
-#endif
/* After we passed the volume off of to PA we never want to set it
again, since it is PA's job to save/restore volumes. */
/* free the stream that we acquired before */
static gboolean
-gst_pulseringbuffer_release (GstRingBuffer * buf)
+gst_pulseringbuffer_release (GstAudioRingBuffer * buf)
{
GstPulseRingBuffer *pbuf;
gst_pulsering_destroy_stream (pbuf);
pa_threaded_mainloop_unlock (mainloop);
-#ifdef HAVE_PULSE_1_0
{
GstPulseSink *psink;
g_atomic_int_set (&psink->format_lost, FALSE);
psink->format_lost_time = GST_CLOCK_TIME_NONE;
}
-#endif
return TRUE;
}
psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
-#ifdef HAVE_PULSE_1_0
if (g_atomic_int_get (&psink->format_lost)) {
/* Sink format changed, stream's gone so fake being paused */
return TRUE;
}
-#endif
GST_DEBUG_OBJECT (psink, "setting corked state to %d", corked);
if (pbuf->corked != corked) {
}
static void
-gst_pulseringbuffer_clear (GstRingBuffer * buf)
+gst_pulseringbuffer_clear (GstAudioRingBuffer * buf)
{
GstPulseSink *psink;
GstPulseRingBuffer *pbuf;
/* start/resume playback ASAP, we don't uncork here but in the commit method */
static gboolean
-gst_pulseringbuffer_start (GstRingBuffer * buf)
+gst_pulseringbuffer_start (GstAudioRingBuffer * buf)
{
GstPulseSink *psink;
GstPulseRingBuffer *pbuf;
/* EOS needs running clock */
if (GST_BASE_SINK_CAST (psink)->eos ||
- g_atomic_int_get (&GST_BASE_AUDIO_SINK (psink)->abidata.
- ABI.eos_rendering))
+ g_atomic_int_get (&GST_AUDIO_BASE_SINK (psink)->eos_rendering))
gst_pulsering_set_corked (pbuf, FALSE, FALSE);
pa_threaded_mainloop_unlock (mainloop);
/* pause/stop playback ASAP */
static gboolean
-gst_pulseringbuffer_pause (GstRingBuffer * buf)
+gst_pulseringbuffer_pause (GstAudioRingBuffer * buf)
{
GstPulseSink *psink;
GstPulseRingBuffer *pbuf;
/* stop playback, we flush everything. */
static gboolean
-gst_pulseringbuffer_stop (GstRingBuffer * buf)
+gst_pulseringbuffer_stop (GstAudioRingBuffer * buf)
{
GstPulseSink *psink;
GstPulseRingBuffer *pbuf;
GST_DEBUG_OBJECT (psink, "signal commit thread");
pa_threaded_mainloop_signal (mainloop, 0);
}
-#ifdef HAVE_PULSE_1_0
if (g_atomic_int_get (&psink->format_lost)) {
/* Don't try to flush, the stream's probably gone by now */
res = TRUE;
goto cleanup;
}
-#endif
/* then try to flush, it's not fatal when this fails */
GST_DEBUG_OBJECT (psink, "flushing");
G_STMT_START { \
guint8 *sb = s, *db = d; \
while (s <= se && d < de) { \
- memcpy (d, s, bps); \
- s += bps; \
+ memcpy (d, s, bpf); \
+ s += bpf; \
*accum += outr; \
if ((*accum << 1) >= inr) { \
*accum -= inr; \
- d += bps; \
+ d += bpf; \
} \
} \
- in_samples -= (s - sb)/bps; \
- out_samples -= (d - db)/bps; \
+ in_samples -= (s - sb)/bpf; \
+ out_samples -= (d - db)/bpf; \
GST_DEBUG ("fwd_up end %d/%d",*accum,*toprocess); \
} G_STMT_END
G_STMT_START { \
guint8 *sb = s, *db = d; \
while (s <= se && d < de) { \
- memcpy (d, s, bps); \
- d += bps; \
+ memcpy (d, s, bpf); \
+ d += bpf; \
*accum += inr; \
if ((*accum << 1) >= outr) { \
*accum -= outr; \
- s += bps; \
+ s += bpf; \
} \
} \
- in_samples -= (s - sb)/bps; \
- out_samples -= (d - db)/bps; \
+ in_samples -= (s - sb)/bpf; \
+ out_samples -= (d - db)/bpf; \
GST_DEBUG ("fwd_down end %d/%d",*accum,*toprocess); \
} G_STMT_END
G_STMT_START { \
guint8 *sb = se, *db = d; \
while (s <= se && d < de) { \
- memcpy (d, se, bps); \
- se -= bps; \
+ memcpy (d, se, bpf); \
+ se -= bpf; \
*accum += outr; \
while (d < de && (*accum << 1) >= inr) { \
*accum -= inr; \
- d += bps; \
+ d += bpf; \
} \
} \
- in_samples -= (sb - se)/bps; \
- out_samples -= (d - db)/bps; \
+ in_samples -= (sb - se)/bpf; \
+ out_samples -= (d - db)/bpf; \
GST_DEBUG ("rev_up end %d/%d",*accum,*toprocess); \
} G_STMT_END
G_STMT_START { \
guint8 *sb = se, *db = d; \
while (s <= se && d < de) { \
- memcpy (d, se, bps); \
- d += bps; \
+ memcpy (d, se, bpf); \
+ d += bpf; \
*accum += inr; \
while (s <= se && (*accum << 1) >= outr) { \
*accum -= outr; \
- se -= bps; \
+ se -= bpf; \
} \
} \
- in_samples -= (sb - se)/bps; \
- out_samples -= (d - db)/bps; \
+ in_samples -= (sb - se)/bpf; \
+ out_samples -= (d - db)/bpf; \
GST_DEBUG ("rev_down end %d/%d",*accum,*toprocess); \
} G_STMT_END
/* our custom commit function because we write into the buffer of pulseaudio
* instead of keeping our own buffer */
static guint
-gst_pulseringbuffer_commit (GstRingBuffer * buf, guint64 * sample,
+gst_pulseringbuffer_commit (GstAudioRingBuffer * buf, guint64 * sample,
guchar * data, gint in_samples, gint out_samples, gint * accum)
{
GstPulseSink *psink;
guint8 *data_end;
gboolean reverse;
gint *toprocess;
- gint inr, outr, bps;
+ gint inr, outr, bpf;
gint64 offset;
guint bufsize;
/* make sure the ringbuffer is started */
if (G_UNLIKELY (g_atomic_int_get (&buf->state) !=
- GST_RING_BUFFER_STATE_STARTED)) {
+ GST_AUDIO_RING_BUFFER_STATE_STARTED)) {
/* see if we are allowed to start it */
- if (G_UNLIKELY (g_atomic_int_get (&buf->abidata.ABI.may_start) == FALSE))
+ if (G_UNLIKELY (g_atomic_int_get (&buf->may_start) == FALSE))
goto no_start;
GST_DEBUG_OBJECT (buf, "start!");
- if (!gst_ring_buffer_start (buf))
+ if (!gst_audio_ring_buffer_start (buf))
goto start_failed;
}
GST_DEBUG_OBJECT (psink, "entering commit");
pbuf->in_commit = TRUE;
- bps = buf->spec.bytes_per_sample;
+ bpf = GST_AUDIO_INFO_BPF (&buf->spec.info);
bufsize = buf->spec.segsize * buf->spec.segtotal;
/* our toy resampler for trick modes */
/* data_end points to the last sample we have to write, not past it. This is
* needed to properly handle reverse playback: it points to the last sample. */
- data_end = data + (bps * inr);
+ data_end = data + (bpf * inr);
-#ifdef HAVE_PULSE_1_0
if (g_atomic_int_get (&psink->format_lost)) {
/* Sink format changed, drop the data and hope upstream renegotiates */
goto fake_done;
}
-#endif
if (pbuf->paused)
goto was_paused;
/* offset is in bytes */
- offset = *sample * bps;
+ offset = *sample * bpf;
while (*toprocess > 0) {
size_t avail;
GST_LOG_OBJECT (psink, "discontinuity, offset is %" G_GINT64_FORMAT ", "
"last offset was %" G_GINT64_FORMAT, offset, pbuf->m_lastoffset);
- towrite = out_samples * bps;
+ towrite = out_samples * bpf;
/* Wait for at least segsize bytes to become available */
if (towrite > buf->spec.segsize)
GST_LOG_OBJECT (psink,
"flushing %u samples at offset %" G_GINT64_FORMAT,
- (guint) pbuf->m_towrite / bps, pbuf->m_offset);
+ (guint) pbuf->m_towrite / bpf, pbuf->m_offset);
if (pa_stream_write (pbuf->stream, (uint8_t *) pbuf->m_data,
pbuf->m_towrite, NULL, pbuf->m_offset, PA_SEEK_ABSOLUTE) < 0) {
for (;;) {
pbuf->m_writable = pa_stream_writable_size (pbuf->stream);
-#ifdef HAVE_PULSE_1_0
if (g_atomic_int_get (&psink->format_lost)) {
/* Sink format changed, give up and hope upstream renegotiates */
goto fake_done;
}
-#endif
if (pbuf->m_writable == (size_t) - 1)
goto writable_size_failed;
- pbuf->m_writable /= bps;
- pbuf->m_writable *= bps; /* handle only complete samples */
+ pbuf->m_writable /= bpf;
+ pbuf->m_writable *= bpf; /* handle only complete samples */
if (pbuf->m_writable >= towrite)
break;
}
/* Recalculate what we can write in the next chunk */
- towrite = out_samples * bps;
+ towrite = out_samples * bpf;
if (pbuf->m_writable > towrite)
pbuf->m_writable = towrite;
if (towrite > pbuf->m_writable)
towrite = pbuf->m_writable;
- avail = towrite / bps;
+ avail = towrite / bpf;
GST_LOG_OBJECT (psink, "writing %u samples at offset %" G_GUINT64_FORMAT,
(guint) avail, offset);
-#ifdef HAVE_PULSE_1_0
/* No trick modes for passthrough streams */
if (G_UNLIKELY (!pbuf->is_pcm && (inr != outr || reverse))) {
GST_WARNING_OBJECT (psink, "Passthrough stream can't run in trick mode");
goto unlock_and_fail;
}
-#endif
if (G_LIKELY (inr == outr && !reverse)) {
/* no rate conversion, simply write out the samples */
pbuf->m_towrite += towrite;
pbuf->m_writable -= towrite;
- avail = towrite / bps;
+ avail = towrite / bpf;
}
/* flush the buffer if it's full */
if ((pbuf->m_data != NULL) && (pbuf->m_towrite > 0)
&& (pbuf->m_writable == 0)) {
GST_LOG_OBJECT (psink, "flushing %u samples at offset %" G_GINT64_FORMAT,
- (guint) pbuf->m_towrite / bps, pbuf->m_offset);
+ (guint) pbuf->m_towrite / bpf, pbuf->m_offset);
if (pa_stream_write (pbuf->stream, (uint8_t *) pbuf->m_data,
pbuf->m_towrite, NULL, pbuf->m_offset, PA_SEEK_ABSOLUTE) < 0) {
}
*sample += avail;
- offset += avail * bps;
+ offset += avail * bpf;
pbuf->m_lastoffset = offset;
/* check if we need to uncork after writing the samples */
}
}
-#ifdef HAVE_PULSE_1_0
fake_done:
-#endif
/* we consumed all samples here */
- data = data_end + bps;
+ data = data_end + bpf;
pbuf->in_commit = FALSE;
pa_threaded_mainloop_unlock (mainloop);
done:
- result = inr - ((data_end - data) / bps);
+ result = inr - ((data_end - data) / bpf);
GST_LOG_OBJECT (psink, "wrote %d samples", result);
return result;
/* flush the buffer if possible */
if (pbuf->stream && (pbuf->m_data != NULL) && (pbuf->m_towrite > 0)) {
#ifndef GST_DISABLE_GST_DEBUG
- gint bps;
+ gint bpf;
- bps = (GST_RING_BUFFER_CAST (pbuf))->spec.bytes_per_sample;
+ bpf = (GST_AUDIO_RING_BUFFER_CAST (pbuf))->spec.info.bpf;
GST_LOG_OBJECT (psink,
"flushing %u samples at offset %" G_GINT64_FORMAT,
- (guint) pbuf->m_towrite / bps, pbuf->m_offset);
+ (guint) pbuf->m_towrite / bpf, pbuf->m_offset);
#endif
if (pa_stream_write (pbuf->stream, (uint8_t *) pbuf->m_data,
static void gst_pulsesink_finalize (GObject * object);
static gboolean gst_pulsesink_event (GstBaseSink * sink, GstEvent * event);
+static gboolean gst_pulsesink_query (GstBaseSink * sink, GstQuery * query);
static GstStateChangeReturn gst_pulsesink_change_state (GstElement * element,
GstStateChange transition);
-static void gst_pulsesink_init_interfaces (GType type);
-
-GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseSink, gst_pulsesink);
-
-#define _do_init(type) \
- gst_pulsesink_init_contexts (); \
- gst_pulsesink_init_interfaces (type);
-
-GST_BOILERPLATE_FULL (GstPulseSink, gst_pulsesink, GstBaseAudioSink,
- GST_TYPE_BASE_AUDIO_SINK, _do_init);
-
-static gboolean
-gst_pulsesink_interface_supported (GstImplementsInterface *
- iface, GType interface_type)
-{
- GstPulseSink *this = GST_PULSESINK_CAST (iface);
-
- if (interface_type == GST_TYPE_PROPERTY_PROBE && this->probe)
- return TRUE;
- if (interface_type == GST_TYPE_STREAM_VOLUME)
- return TRUE;
-
- return FALSE;
-}
-
-static void
-gst_pulsesink_implements_interface_init (GstImplementsInterfaceClass * klass)
-{
- klass->supported = gst_pulsesink_interface_supported;
-}
-
-static void
-gst_pulsesink_init_interfaces (GType type)
-{
- static const GInterfaceInfo implements_iface_info = {
- (GInterfaceInitFunc) gst_pulsesink_implements_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo probe_iface_info = {
- (GInterfaceInitFunc) gst_pulsesink_property_probe_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo svol_iface_info = {
- NULL, NULL, NULL
- };
-
- g_type_add_interface_static (type, GST_TYPE_STREAM_VOLUME, &svol_iface_info);
- g_type_add_interface_static (type, GST_TYPE_IMPLEMENTS_INTERFACE,
- &implements_iface_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &probe_iface_info);
-}
-
-static void
-gst_pulsesink_base_init (gpointer g_class)
-{
- static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS (PULSE_SINK_TEMPLATE_CAPS));
-
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (PULSE_SINK_TEMPLATE_CAPS));
- gst_element_class_set_details_simple (element_class,
- "PulseAudio Audio Sink",
- "Sink/Audio", "Plays audio to a PulseAudio server", "Lennart Poettering");
- gst_element_class_add_static_pad_template (element_class, &pad_template);
-}
+#define gst_pulsesink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPulseSink, gst_pulsesink, GST_TYPE_AUDIO_BASE_SINK,
+ gst_pulsesink_init_contexts ();
+ G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL)
+ );
-static GstRingBuffer *
-gst_pulsesink_create_ringbuffer (GstBaseAudioSink * sink)
+static GstAudioRingBuffer *
+gst_pulsesink_create_ringbuffer (GstAudioBaseSink * sink)
{
- GstRingBuffer *buffer;
+ GstAudioRingBuffer *buffer;
GST_DEBUG_OBJECT (sink, "creating ringbuffer");
buffer = g_object_new (GST_TYPE_PULSERING_BUFFER, NULL);
}
static GstBuffer *
-gst_pulsesink_payload (GstBaseAudioSink * sink, GstBuffer * buf)
+gst_pulsesink_payload (GstAudioBaseSink * sink, GstBuffer * buf)
{
switch (sink->ringbuffer->spec.type) {
- case GST_BUFTYPE_AC3:
- case GST_BUFTYPE_EAC3:
- case GST_BUFTYPE_DTS:
- case GST_BUFTYPE_MPEG:
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_AC3:
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_EAC3:
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_DTS:
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG:
{
/* FIXME: alloc memory from PA if possible */
gint framesize = gst_audio_iec61937_frame_size (&sink->ringbuffer->spec);
GstBuffer *out;
+ GstMapInfo inmap, outmap;
+ gboolean res;
if (framesize <= 0)
return NULL;
out = gst_buffer_new_and_alloc (framesize);
- if (!gst_audio_iec61937_payload (GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf), GST_BUFFER_DATA (out),
- GST_BUFFER_SIZE (out), &sink->ringbuffer->spec)) {
+ gst_buffer_map (buf, &inmap, GST_MAP_READ);
+ gst_buffer_map (out, &outmap, GST_MAP_WRITE);
+
+ res = gst_audio_iec61937_payload (inmap.data, inmap.size,
+ outmap.data, outmap.size, &sink->ringbuffer->spec);
+
+ gst_buffer_unmap (buf, &inmap);
+ gst_buffer_unmap (out, &outmap);
+
+ if (!res) {
gst_buffer_unref (out);
return NULL;
}
- gst_buffer_copy_metadata (out, buf, GST_BUFFER_COPY_ALL);
+ gst_buffer_copy_into (out, buf, GST_BUFFER_COPY_METADATA, 0, -1);
return out;
}
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstBaseSinkClass *gstbasesink_class = GST_BASE_SINK_CLASS (klass);
GstBaseSinkClass *bc;
- GstBaseAudioSinkClass *gstaudiosink_class = GST_BASE_AUDIO_SINK_CLASS (klass);
+ GstAudioBaseSinkClass *gstaudiosink_class = GST_AUDIO_BASE_SINK_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
gchar *clientname;
gobject_class->get_property = gst_pulsesink_get_property;
gstbasesink_class->event = GST_DEBUG_FUNCPTR (gst_pulsesink_event);
+ gstbasesink_class->query = GST_DEBUG_FUNCPTR (gst_pulsesink_query);
/* restore the original basesink pull methods */
bc = g_type_class_peek (GST_TYPE_BASE_SINK);
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
- * GstPulseSink:client
+ * GstPulseSink:client-name
*
* The PulseAudio client name to use.
- *
- * Since: 0.10.25
*/
clientname = gst_pulse_client_name ();
g_object_class_install_property (gobject_class,
- PROP_CLIENT,
- g_param_spec_string ("client", "Client",
+ PROP_CLIENT_NAME,
+ g_param_spec_string ("client-name", "Client Name",
"The PulseAudio client name to use", clientname,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_param_spec_boxed ("stream-properties", "stream properties",
"list of pulseaudio stream properties",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "PulseAudio Audio Sink",
+ "Sink/Audio", "Plays audio to a PulseAudio server", "Lennart Poettering");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&pad_template));
}
/* returns the current time of the sink ringbuffer */
static GstClockTime
-gst_pulsesink_get_time (GstClock * clock, GstBaseAudioSink * sink)
+gst_pulsesink_get_time (GstClock * clock, GstAudioBaseSink * sink)
{
GstPulseSink *psink;
GstPulseRingBuffer *pbuf;
pbuf = GST_PULSERING_BUFFER_CAST (sink->ringbuffer);
psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
-#ifdef HAVE_PULSE_1_0
if (g_atomic_int_get (&psink->format_lost)) {
/* Stream was lost in a format change, it'll get set up again once
* upstream renegotiates */
return psink->format_lost_time;
}
-#endif
pa_threaded_mainloop_lock (mainloop);
if (gst_pulsering_is_dead (psink, pbuf, TRUE))
{
GstPulseRingBuffer *pbuf;
GstPulseSink *psink;
-#ifdef HAVE_PULSE_1_0
GList *l;
guint8 j;
-#endif
pbuf = GST_PULSERING_BUFFER_CAST (userdata);
psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
g_free (psink->device_description);
psink->device_description = g_strdup (i->description);
-#ifdef HAVE_PULSE_1_0
- g_mutex_lock (psink->sink_formats_lock);
+ g_mutex_lock (&psink->sink_formats_lock);
for (l = g_list_first (psink->sink_formats); l; l = g_list_next (l))
pa_format_info_free ((pa_format_info *) l->data);
psink->sink_formats = g_list_prepend (psink->sink_formats,
pa_format_info_copy (i->formats[j]));
- g_mutex_unlock (psink->sink_formats_lock);
-#endif
+ g_mutex_unlock (&psink->sink_formats_lock);
done:
pa_threaded_mainloop_signal (mainloop, 0);
}
-#ifdef HAVE_PULSE_1_0
-/* NOTE: If you're making changes here, see if pulseaudiosink acceptcaps also
- * needs to be changed accordingly. */
static gboolean
-gst_pulsesink_pad_acceptcaps (GstPad * pad, GstCaps * caps)
+gst_pulsesink_query_acceptcaps (GstPulseSink * psink, GstCaps * caps)
{
- GstPulseSink *psink = GST_PULSESINK (gst_pad_get_parent_element (pad));
- GstPulseRingBuffer *pbuf = GST_PULSERING_BUFFER_CAST (GST_BASE_AUDIO_SINK
+ GstPulseRingBuffer *pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK
(psink)->ringbuffer);
+ GstPad *pad = GST_BASE_SINK_PAD (psink);
GstCaps *pad_caps;
GstStructure *st;
gboolean ret = FALSE;
- GstRingBufferSpec spec = { 0 };
+ GstAudioRingBufferSpec spec = { 0 };
pa_stream *stream = NULL;
pa_operation *o = NULL;
pa_channel_map channel_map;
pa_format_info *format = NULL, *formats[1];
guint channels;
- pad_caps = gst_pad_get_caps_reffed (pad);
- if (pad_caps) {
- ret = gst_caps_can_intersect (pad_caps, caps);
- gst_caps_unref (pad_caps);
- }
+ pad_caps = gst_pad_query_caps (pad, caps);
+ ret = pad_caps != NULL;
+ gst_caps_unref (pad_caps);
/* Either template caps didn't match, or we're still in NULL state */
if (!ret || !pbuf->context)
pa_threaded_mainloop_lock (mainloop);
- spec.latency_time = GST_BASE_AUDIO_SINK (psink)->latency_time;
- if (!gst_ring_buffer_parse_caps (&spec, caps))
+ spec.latency_time = GST_AUDIO_BASE_SINK (psink)->latency_time;
+ if (!gst_audio_ring_buffer_parse_caps (&spec, caps))
goto out;
if (!gst_pulse_fill_format_info (&spec, &format, &channels))
goto out;
}
- g_mutex_lock (psink->sink_formats_lock);
+ g_mutex_lock (&psink->sink_formats_lock);
for (i = g_list_first (psink->sink_formats); i; i = g_list_next (i)) {
if (pa_format_info_is_compatible ((pa_format_info *) i->data, format)) {
ret = TRUE;
break;
}
}
- g_mutex_unlock (psink->sink_formats_lock);
+ g_mutex_unlock (&psink->sink_formats_lock);
} else {
/* We're in READY, let's connect a stream to see if the format is
* accpeted by whatever sink we're routed to */
pa_threaded_mainloop_unlock (mainloop);
done:
- gst_object_unref (psink);
return ret;
info_failed:
goto out;
}
}
-#endif
static void
-gst_pulsesink_init (GstPulseSink * pulsesink, GstPulseSinkClass * klass)
+gst_pulsesink_init (GstPulseSink * pulsesink)
{
pulsesink->server = NULL;
pulsesink->device = NULL;
pulsesink->device_description = NULL;
pulsesink->client_name = gst_pulse_client_name ();
-#ifdef HAVE_PULSE_1_0
- pulsesink->sink_formats_lock = g_mutex_new ();
+ g_mutex_init (&pulsesink->sink_formats_lock);
pulsesink->sink_formats = NULL;
-#endif
pulsesink->volume = DEFAULT_VOLUME;
pulsesink->volume_set = FALSE;
pulsesink->notify = 0;
-#ifdef HAVE_PULSE_1_0
g_atomic_int_set (&pulsesink->format_lost, FALSE);
pulsesink->format_lost_time = GST_CLOCK_TIME_NONE;
-#endif
pulsesink->properties = NULL;
pulsesink->proplist = NULL;
/* override with a custom clock */
- if (GST_BASE_AUDIO_SINK (pulsesink)->provided_clock)
- gst_object_unref (GST_BASE_AUDIO_SINK (pulsesink)->provided_clock);
+ if (GST_AUDIO_BASE_SINK (pulsesink)->provided_clock)
+ gst_object_unref (GST_AUDIO_BASE_SINK (pulsesink)->provided_clock);
- GST_BASE_AUDIO_SINK (pulsesink)->provided_clock =
+ GST_AUDIO_BASE_SINK (pulsesink)->provided_clock =
gst_audio_clock_new ("GstPulseSinkClock",
- (GstAudioClockGetTimeFunc) gst_pulsesink_get_time, pulsesink);
-
-#ifdef HAVE_PULSE_1_0
- gst_pad_set_acceptcaps_function (GST_BASE_SINK (pulsesink)->sinkpad,
- GST_DEBUG_FUNCPTR (gst_pulsesink_pad_acceptcaps));
-#endif
+ (GstAudioClockGetTimeFunc) gst_pulsesink_get_time, pulsesink, NULL);
/* TRUE for sinks, FALSE for sources */
pulsesink->probe = gst_pulseprobe_new (G_OBJECT (pulsesink),
gst_pulsesink_finalize (GObject * object)
{
GstPulseSink *pulsesink = GST_PULSESINK_CAST (object);
-#ifdef HAVE_PULSE_1_0
GList *i;
-#endif
g_free (pulsesink->server);
g_free (pulsesink->device);
g_free (pulsesink->device_description);
g_free (pulsesink->client_name);
-#ifdef HAVE_PULSE_1_0
for (i = g_list_first (pulsesink->sink_formats); i; i = g_list_next (i))
pa_format_info_free ((pa_format_info *) i->data);
g_list_free (pulsesink->sink_formats);
- g_mutex_free (pulsesink->sink_formats_lock);
-#endif
+ g_mutex_clear (&pulsesink->sink_formats_lock);
if (pulsesink->properties)
gst_structure_free (pulsesink->properties);
GST_DEBUG_OBJECT (psink, "setting volume to %f", volume);
- pbuf = GST_PULSERING_BUFFER_CAST (GST_BASE_AUDIO_SINK (psink)->ringbuffer);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
if (pbuf == NULL || pbuf->stream == NULL)
goto no_buffer;
if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
goto no_index;
-#ifdef HAVE_PULSE_1_0
if (pbuf->is_pcm)
gst_pulse_cvolume_from_linear (&v, pbuf->channels, volume);
else
/* FIXME: this will eventually be superceded by checks to see if the volume
* is readable/writable */
goto unlock;
-#else
- gst_pulse_cvolume_from_linear (&v, pbuf->sample_spec.channels, volume);
-#endif
if (!(o = pa_context_set_sink_input_volume (pbuf->context, idx,
&v, NULL, NULL)))
GST_DEBUG_OBJECT (psink, "setting mute state to %d", mute);
- pbuf = GST_PULSERING_BUFFER_CAST (GST_BASE_AUDIO_SINK (psink)->ringbuffer);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
if (pbuf == NULL || pbuf->stream == NULL)
goto no_buffer;
pa_threaded_mainloop_lock (mainloop);
- pbuf = GST_PULSERING_BUFFER_CAST (GST_BASE_AUDIO_SINK (psink)->ringbuffer);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
if (pbuf == NULL || pbuf->stream == NULL)
goto no_buffer;
pa_threaded_mainloop_lock (mainloop);
mute = psink->mute;
- pbuf = GST_PULSERING_BUFFER_CAST (GST_BASE_AUDIO_SINK (psink)->ringbuffer);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
if (pbuf == NULL || pbuf->stream == NULL)
goto no_buffer;
goto no_mainloop;
pa_threaded_mainloop_lock (mainloop);
- pbuf = GST_PULSERING_BUFFER_CAST (GST_BASE_AUDIO_SINK (psink)->ringbuffer);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
if (pbuf == NULL)
goto no_buffer;
case PROP_MUTE:
gst_pulsesink_set_mute (pulsesink, g_value_get_boolean (value));
break;
- case PROP_CLIENT:
+ case PROP_CLIENT_NAME:
g_free (pulsesink->client_name);
if (!g_value_get_string (value)) {
GST_WARNING_OBJECT (pulsesink,
case PROP_MUTE:
g_value_set_boolean (value, gst_pulsesink_get_mute (pulsesink));
break;
- case PROP_CLIENT:
+ case PROP_CLIENT_NAME:
g_value_set_string (value, pulsesink->client_name);
break;
case PROP_STREAM_PROPERTIES:
pa_threaded_mainloop_lock (mainloop);
- pbuf = GST_PULSERING_BUFFER_CAST (GST_BASE_AUDIO_SINK (psink)->ringbuffer);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
if (pbuf == NULL || pbuf->stream == NULL)
goto no_buffer;
goto finish;
pa_threaded_mainloop_lock (mainloop);
- pbuf = GST_PULSERING_BUFFER_CAST (GST_BASE_AUDIO_SINK (psink)->ringbuffer);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
if (pbuf == NULL || pbuf->stream == NULL)
goto no_buffer;
pa_threaded_mainloop_lock (mainloop);
- pbuf = GST_PULSERING_BUFFER_CAST (GST_BASE_AUDIO_SINK (psink)->ringbuffer);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
if (pbuf == NULL || pbuf->stream == NULL)
goto no_buffer;
return GST_BASE_SINK_CLASS (parent_class)->event (sink, event);
}
+static gboolean
+gst_pulsesink_query (GstBaseSink * sink, GstQuery * query)
+{
+ GstPulseSink *pulsesink = GST_PULSESINK_CAST (sink);
+ gboolean ret;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_ACCEPT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_query_parse_accept_caps (query, &caps);
+ ret = gst_pulsesink_query_acceptcaps (pulsesink, caps);
+ gst_query_set_accept_caps_result (query, ret);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = GST_BASE_SINK_CLASS (parent_class)->query (sink, query);
+ break;
+ }
+ return ret;
+}
+
static void
gst_pulsesink_release_mainloop (GstPulseSink * psink)
{
}
pa_threaded_mainloop_unlock (mainloop);
- g_mutex_lock (pa_shared_resource_mutex);
+ g_mutex_lock (&pa_shared_resource_mutex);
mainloop_ref_ct--;
if (!mainloop_ref_ct) {
GST_INFO_OBJECT (psink, "terminating pa main loop thread");
pa_threaded_mainloop_free (mainloop);
mainloop = NULL;
}
- g_mutex_unlock (pa_shared_resource_mutex);
+ g_mutex_unlock (&pa_shared_resource_mutex);
}
static GstStateChangeReturn
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
- g_mutex_lock (pa_shared_resource_mutex);
+ g_mutex_lock (&pa_shared_resource_mutex);
if (!mainloop_ref_ct) {
GST_INFO_OBJECT (element, "new pa main loop thread");
if (!(mainloop = pa_threaded_mainloop_new ()))
goto mainloop_start_failed;
}
mainloop_ref_ct = 1;
- g_mutex_unlock (pa_shared_resource_mutex);
+ g_mutex_unlock (&pa_shared_resource_mutex);
} else {
GST_INFO_OBJECT (element, "reusing pa main loop thread");
mainloop_ref_ct++;
- g_mutex_unlock (pa_shared_resource_mutex);
+ g_mutex_unlock (&pa_shared_resource_mutex);
}
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
gst_element_post_message (element,
gst_message_new_clock_provide (GST_OBJECT_CAST (element),
- GST_BASE_AUDIO_SINK (pulsesink)->provided_clock, TRUE));
+ GST_AUDIO_BASE_SINK (pulsesink)->provided_clock, TRUE));
break;
default:
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
- /* format_lost is reset in release() in baseaudiosink */
+ /* format_lost is reset in release() in audiobasesink */
gst_element_post_message (element,
gst_message_new_clock_lost (GST_OBJECT_CAST (element),
- GST_BASE_AUDIO_SINK (pulsesink)->provided_clock));
+ GST_AUDIO_BASE_SINK (pulsesink)->provided_clock));
break;
case GST_STATE_CHANGE_READY_TO_NULL:
gst_pulsesink_release_mainloop (pulsesink);
/* ERRORS */
mainloop_failed:
{
- g_mutex_unlock (pa_shared_resource_mutex);
+ g_mutex_unlock (&pa_shared_resource_mutex);
GST_ELEMENT_ERROR (pulsesink, RESOURCE, FAILED,
("pa_threaded_mainloop_new() failed"), (NULL));
return GST_STATE_CHANGE_FAILURE;
}
mainloop_start_failed:
{
- g_mutex_unlock (pa_shared_resource_mutex);
+ g_mutex_unlock (&pa_shared_resource_mutex);
GST_ELEMENT_ERROR (pulsesink, RESOURCE, FAILED,
("pa_threaded_mainloop_start() failed"), (NULL));
return GST_STATE_CHANGE_FAILURE;
state_failure:
{
if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
- /* Clear the PA mainloop if baseaudiosink failed to open the ring_buffer */
+ /* Clear the PA mainloop if audiobasesink failed to open the ring_buffer */
g_assert (mainloop);
gst_pulsesink_release_mainloop (pulsesink);
}
struct _GstPulseSink
{
- GstBaseAudioSink sink;
+ GstAudioBaseSink sink;
gchar *server, *device, *stream_name, *client_name;
gchar *device_description;
GstStructure *properties;
pa_proplist *proplist;
-#ifdef HAVE_PULSE_1_0
- GMutex *sink_formats_lock;
+ GMutex sink_formats_lock;
GList *sink_formats;
volatile gint format_lost;
GstClockTime format_lost_time;
-#endif
};
struct _GstPulseSinkClass
{
- GstBaseAudioSinkClass parent_class;
+ GstAudioBaseSinkClass parent_class;
};
GType gst_pulsesink_get_type (void);
#if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
-# define ENDIANNESS "LITTLE_ENDIAN, BIG_ENDIAN"
+# define FORMATS "{ S16LE, S16BE, F32LE, F32BE, S32LE, S32BE, " \
+ "S24LE, S24BE, S24_32LE, S24_32BE, S8 }"
#else
-# define ENDIANNESS "BIG_ENDIAN, LITTLE_ENDIAN"
+# define FORMATS "{ S16BE, S16LE, F32BE, F32LE, S32BE, S32LE, " \
+ "S24BE, S24LE, S24_32BE, S24_32LE, S8 }"
#endif
#define _PULSE_SINK_CAPS_COMMON \
- "audio/x-raw-int, " \
- "endianness = (int) { " ENDIANNESS " }, " \
- "signed = (boolean) TRUE, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 32 ];" \
- "audio/x-raw-float, " \
- "endianness = (int) { " ENDIANNESS " }, " \
- "width = (int) 32, " \
- "rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 32 ];" \
- "audio/x-raw-int, " \
- "endianness = (int) { " ENDIANNESS " }, " \
- "signed = (boolean) TRUE, " \
- "width = (int) 32, " \
- "depth = (int) 32, " \
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 32 ];" \
- "audio/x-raw-int, " \
- "signed = (boolean) FALSE, " \
- "width = (int) 8, " \
- "depth = (int) 8, " \
+ "audio/x-raw, " \
+ "format = (string) " FORMATS ", " \
+ "layout = (string) interleaved, " \
"rate = (int) [ 1, MAX ], " \
"channels = (int) [ 1, 32 ];" \
"audio/x-alaw, " \
+ "layout = (string) interleaved, " \
"rate = (int) [ 1, MAX], " \
"channels = (int) [ 1, 32 ];" \
"audio/x-mulaw, " \
- "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ];" \
- "audio/x-raw-int, " \
- "endianness = (int) { " ENDIANNESS " }, " \
- "signed = (boolean) TRUE, " \
- "width = (int) 24, " \
- "depth = (int) 24, " \
- "rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 32 ];" \
- "audio/x-raw-int, " \
- "endianness = (int) { " ENDIANNESS " }, " \
- "signed = (boolean) TRUE, " \
- "width = (int) 32, " \
- "depth = (int) 24, " \
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 32 ];"
-
-#ifdef HAVE_PULSE_1_0
+ "layout = (string) interleaved, " \
+ "rate = (int) [ 1, MAX], " \
+ "channels = (int) [ 1, 32 ];"
+
#define _PULSE_SINK_CAPS_1_0 \
"audio/x-ac3, framed = (boolean) true;" \
"audio/x-eac3, framed = (boolean) true; " \
"block-size = (int) { 512, 1024, 2048 }; " \
"audio/mpeg, mpegversion = (int) 1, " \
"mpegaudioversion = (int) [ 1, 2 ], parsed = (boolean) true;"
-#else
-#define _PULSE_SINK_CAPS_1_0 ""
-#endif
#define PULSE_SINK_TEMPLATE_CAPS \
_PULSE_SINK_CAPS_COMMON \
_PULSE_SINK_CAPS_1_0
-#ifdef HAVE_PULSE_1_0
-
-#define GST_TYPE_PULSE_AUDIO_SINK \
- (gst_pulse_audio_sink_get_type())
-#define GST_PULSE_AUDIO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_PULSE_AUDIO_SINK,GstPulseAudioSink))
-#define GST_PULSE_AUDIO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_PULSE_AUDIO_SINK,GstPulseAudioSinkClass))
-#define GST_IS_PULSE_AUDIO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_PULSE_AUDIO_SINK))
-#define GST_IS_PULSE_AUDIO_SINK_CLASS(obj) \
- (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_PULSE_AUDIO_SINK))
-#define GST_PULSE_AUDIO_SINK_CAST(obj) \
- ((GstPulseAudioSink *)(obj))
-
-GType gst_pulse_audio_sink_get_type (void);
-
-#endif /* HAVE_PULSE_1_0 */
-
G_END_DECLS
#endif /* __GST_PULSESINK_H__ */
#include <gst/base/gstbasesrc.h>
#include <gst/gsttaglist.h>
-#ifdef HAVE_PULSE_1_0
-#include <gst/interfaces/streamvolume.h>
-#endif
+#include <gst/audio/streamvolume.h>
#include "pulsesrc.h"
#include "pulseutil.h"
#define DEFAULT_DEVICE NULL
#define DEFAULT_DEVICE_NAME NULL
-#ifdef HAVE_PULSE_1_0
#define DEFAULT_VOLUME 1.0
#define DEFAULT_MUTE FALSE
#define MAX_VOLUME 10.0
-#endif
enum
{
PROP_SERVER,
PROP_DEVICE,
PROP_DEVICE_NAME,
- PROP_CLIENT,
+ PROP_CLIENT_NAME,
PROP_STREAM_PROPERTIES,
PROP_SOURCE_OUTPUT_INDEX,
-#ifdef HAVE_PULSE_1_0
PROP_VOLUME,
PROP_MUTE,
-#endif
PROP_LAST
};
GValue * value, GParamSpec * pspec);
static void gst_pulsesrc_finalize (GObject * object);
+static gboolean gst_pulsesrc_set_corked (GstPulseSrc * psrc, gboolean corked,
+ gboolean wait);
static gboolean gst_pulsesrc_open (GstAudioSrc * asrc);
static gboolean gst_pulsesrc_close (GstAudioSrc * asrc);
static gboolean gst_pulsesrc_prepare (GstAudioSrc * asrc,
- GstRingBufferSpec * spec);
+ GstAudioRingBufferSpec * spec);
static gboolean gst_pulsesrc_unprepare (GstAudioSrc * asrc);
static GstStateChangeReturn gst_pulsesrc_change_state (GstElement *
element, GstStateChange transition);
-static void gst_pulsesrc_init_interfaces (GType type);
-
#if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
-# define ENDIANNESS "LITTLE_ENDIAN, BIG_ENDIAN"
+# define FORMATS "{ S16LE, S16BE, F32LE, F32BE, S32LE, S32BE, U8 }"
#else
-# define ENDIANNESS "BIG_ENDIAN, LITTLE_ENDIAN"
+# define FORMATS "{ S16BE, S16LE, F32BE, F32LE, S32BE, S32LE, U8 }"
#endif
-GST_IMPLEMENT_PULSEMIXER_CTRL_METHODS (GstPulseSrc, gst_pulsesrc);
-GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseSrc, gst_pulsesrc);
-GST_BOILERPLATE_FULL (GstPulseSrc, gst_pulsesrc, GstAudioSrc,
- GST_TYPE_AUDIO_SRC, gst_pulsesrc_init_interfaces);
-
-static gboolean
-gst_pulsesrc_interface_supported (GstImplementsInterface *
- iface, GType interface_type)
-{
- GstPulseSrc *this = GST_PULSESRC_CAST (iface);
+static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMATS ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-alaw, "
+ "rate = (int) [ 1, MAX], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-mulaw, "
+ "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ]")
+ );
- if (interface_type == GST_TYPE_MIXER && this->mixer)
- return TRUE;
- if (interface_type == GST_TYPE_PROPERTY_PROBE && this->probe)
- return TRUE;
-
-#ifdef HAVE_PULSE_1_0
- if (interface_type == GST_TYPE_STREAM_VOLUME)
- return TRUE;
-#endif
-
- return FALSE;
-}
-
-static void
-gst_pulsesrc_implements_interface_init (GstImplementsInterfaceClass * klass)
-{
- klass->supported = gst_pulsesrc_interface_supported;
-}
-
-static void
-gst_pulsesrc_init_interfaces (GType type)
-{
-#ifdef HAVE_PULSE_1_0
- static const GInterfaceInfo svol_iface_info = {
- NULL, NULL, NULL,
- };
-#endif
- static const GInterfaceInfo implements_iface_info = {
- (GInterfaceInitFunc) gst_pulsesrc_implements_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo mixer_iface_info = {
- (GInterfaceInitFunc) gst_pulsesrc_mixer_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo probe_iface_info = {
- (GInterfaceInitFunc) gst_pulsesrc_property_probe_interface_init,
- NULL,
- NULL,
- };
-
-#ifdef HAVE_PULSE_1_0
- g_type_add_interface_static (type, GST_TYPE_STREAM_VOLUME, &svol_iface_info);
-#endif
- g_type_add_interface_static (type, GST_TYPE_IMPLEMENTS_INTERFACE,
- &implements_iface_info);
- g_type_add_interface_static (type, GST_TYPE_MIXER, &mixer_iface_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &probe_iface_info);
-}
-
-static void
-gst_pulsesrc_base_init (gpointer g_class)
-{
+GST_IMPLEMENT_PULSEMIXER_CTRL_METHODS (GstPulseSrc, gst_pulsesrc);
- static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-float, "
- "endianness = (int) { " ENDIANNESS " }, "
- "width = (int) 32, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 32, "
- "depth = (int) 32, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "signed = (boolean) FALSE, "
- "width = (int) 8, "
- "depth = (int) 8, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-alaw, "
- "rate = (int) [ 1, MAX], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-mulaw, "
- "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ]")
- );
-
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "PulseAudio Audio Source",
- "Source/Audio",
- "Captures audio from a PulseAudio server", "Lennart Poettering");
- gst_element_class_add_static_pad_template (element_class, &pad_template);
-}
+#define gst_pulsesrc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPulseSrc, gst_pulsesrc, GST_TYPE_AUDIO_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_MIXER, gst_pulsesrc_mixer_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));
static void
gst_pulsesrc_class_init (GstPulseSrcClass * klass)
clientname = gst_pulse_client_name ();
/**
- * GstPulseSrc:client
+ * GstPulseSrc:client-name
*
* The PulseAudio client name to use.
- *
- * Since: 0.10.27
*/
g_object_class_install_property (gobject_class,
- PROP_CLIENT,
- g_param_spec_string ("client", "Client",
+ PROP_CLIENT_NAME,
+ g_param_spec_string ("client-name", "Client Name",
"The PulseAudio client_name_to_use", clientname,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_param_spec_boxed ("stream-properties", "stream properties",
"list of pulseaudio stream properties",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
/**
* GstPulseSrc:source-output-index
*
"record stream", 0, G_MAXUINT, PA_INVALID_INDEX,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
-#ifdef HAVE_PULSE_1_0
+ gst_element_class_set_details_simple (gstelement_class,
+ "PulseAudio Audio Source",
+ "Source/Audio",
+ "Captures audio from a PulseAudio server", "Lennart Poettering");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&pad_template));
+
/**
* GstPulseSrc:volume
*
- * The volume of the record stream. Only works when using PulseAudio 1.0 or
- * later.
- *
- * Since: 0.10.36
+ * The volume of the record stream.
*/
g_object_class_install_property (gobject_class,
PROP_VOLUME, g_param_spec_double ("volume", "Volume",
/**
* GstPulseSrc:mute
*
- * Whether the stream is muted or not. Only works when using PulseAudio 1.0
- * or later.
- *
- * Since: 0.10.36
+ * Whether the stream is muted or not.
*/
g_object_class_install_property (gobject_class,
PROP_MUTE, g_param_spec_boolean ("mute", "Mute",
"Mute state of this stream",
DEFAULT_MUTE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-#endif
}
static void
-gst_pulsesrc_init (GstPulseSrc * pulsesrc, GstPulseSrcClass * klass)
+gst_pulsesrc_init (GstPulseSrc * pulsesrc)
{
pulsesrc->server = NULL;
pulsesrc->device = NULL;
pulsesrc->context = NULL;
pulsesrc->stream = NULL;
+ pulsesrc->stream_connected = FALSE;
pulsesrc->source_output_idx = PA_INVALID_INDEX;
pulsesrc->read_buffer = NULL;
pa_sample_spec_init (&pulsesrc->sample_spec);
pulsesrc->operation_success = FALSE;
- pulsesrc->paused = FALSE;
+ pulsesrc->paused = TRUE;
pulsesrc->in_read = FALSE;
-#ifdef HAVE_PULSE_1_0
pulsesrc->volume = DEFAULT_VOLUME;
pulsesrc->volume_set = FALSE;
pulsesrc->mute_set = FALSE;
pulsesrc->notify = 0;
-#endif
pulsesrc->mixer = NULL;
pulsesrc->probe = gst_pulseprobe_new (G_OBJECT (pulsesrc), G_OBJECT_GET_CLASS (pulsesrc), PROP_DEVICE, pulsesrc->server, FALSE, TRUE); /* FALSE for sinks, TRUE for sources */
/* this should be the default but it isn't yet */
- gst_base_audio_src_set_slave_method (GST_BASE_AUDIO_SRC (pulsesrc),
- GST_BASE_AUDIO_SRC_SLAVE_SKEW);
+ gst_audio_base_src_set_slave_method (GST_AUDIO_BASE_SRC (pulsesrc),
+ GST_AUDIO_BASE_SRC_SLAVE_SKEW);
}
static void
pa_stream_disconnect (pulsesrc->stream);
pa_stream_unref (pulsesrc->stream);
pulsesrc->stream = NULL;
+ pulsesrc->stream_connected = FALSE;
pulsesrc->source_output_idx = PA_INVALID_INDEX;
g_object_notify (G_OBJECT (pulsesrc), "source-output-index");
}
/* Make sure we don't get any further callbacks */
pa_context_set_state_callback (pulsesrc->context, NULL, NULL);
-#ifdef HAVE_PULSE_1_0
pa_context_set_subscribe_callback (pulsesrc->context, NULL, NULL);
-#endif
pa_context_unref (pulsesrc->context);
}
}
-#ifdef HAVE_PULSE_1_0
static void
gst_pulsesrc_source_output_info_cb (pa_context * c,
const pa_source_output_info * i, int eol, void *userdata)
goto unlock;
}
}
-#endif
static void
gst_pulsesrc_set_property (GObject * object,
g_free (pulsesrc->device);
pulsesrc->device = g_value_dup_string (value);
break;
- case PROP_CLIENT:
+ case PROP_CLIENT_NAME:
g_free (pulsesrc->client_name);
if (!g_value_get_string (value)) {
GST_WARNING_OBJECT (pulsesrc,
pa_proplist_free (pulsesrc->proplist);
pulsesrc->proplist = gst_pulse_make_proplist (pulsesrc->properties);
break;
-#ifdef HAVE_PULSE_1_0
case PROP_VOLUME:
gst_pulsesrc_set_stream_volume (pulsesrc, g_value_get_double (value));
break;
case PROP_MUTE:
gst_pulsesrc_set_stream_mute (pulsesrc, g_value_get_boolean (value));
break;
-#endif
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
case PROP_DEVICE_NAME:
g_value_take_string (value, gst_pulsesrc_device_description (pulsesrc));
break;
- case PROP_CLIENT:
+ case PROP_CLIENT_NAME:
g_value_set_string (value, pulsesrc->client_name);
break;
case PROP_STREAM_PROPERTIES:
case PROP_SOURCE_OUTPUT_INDEX:
g_value_set_uint (value, pulsesrc->source_output_idx);
break;
-#ifdef HAVE_PULSE_1_0
case PROP_VOLUME:
g_value_set_double (value, gst_pulsesrc_get_stream_volume (pulsesrc));
break;
case PROP_MUTE:
g_value_set_boolean (value, gst_pulsesrc_get_stream_mute (pulsesrc));
break;
-#endif
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
GST_WARNING_OBJECT (GST_PULSESRC_CAST (userdata), "Got overflow");
}
-#ifdef HAVE_PULSE_1_0
static void
gst_pulsesrc_context_subscribe_cb (pa_context * c,
pa_subscription_event_type_t t, uint32_t idx, void *userdata)
/* inform streaming thread to notify */
g_atomic_int_compare_and_exchange (&psrc->notify, 0, 1);
}
-#endif
static gboolean
gst_pulsesrc_open (GstAudioSrc * asrc)
pa_context_set_state_callback (pulsesrc->context,
gst_pulsesrc_context_state_cb, pulsesrc);
-#ifdef HAVE_PULSE_1_0
pa_context_set_subscribe_callback (pulsesrc->context,
gst_pulsesrc_context_subscribe_cb, pulsesrc);
-#endif
GST_DEBUG_OBJECT (pulsesrc, "connect to server %s",
GST_STR_NULL (pulsesrc->server));
pa_threaded_mainloop_lock (pulsesrc->mainloop);
pulsesrc->in_read = TRUE;
-#ifdef HAVE_PULSE_1_0
if (g_atomic_int_compare_and_exchange (&pulsesrc->notify, 1, 0)) {
g_object_notify (G_OBJECT (pulsesrc), "volume");
g_object_notify (G_OBJECT (pulsesrc), "mute");
}
-#endif
if (pulsesrc->paused)
goto was_paused;
}
static gboolean
-gst_pulsesrc_create_stream (GstPulseSrc * pulsesrc, GstCaps * caps)
+gst_pulsesrc_create_stream (GstPulseSrc * pulsesrc, GstCaps ** caps)
{
pa_channel_map channel_map;
+ const pa_channel_map *m;
GstStructure *s;
gboolean need_channel_layout = FALSE;
- GstRingBufferSpec spec;
+ GstAudioRingBufferSpec spec;
const gchar *name;
- memset (&spec, 0, sizeof (GstRingBufferSpec));
- spec.latency_time = GST_SECOND;
- if (!gst_ring_buffer_parse_caps (&spec, caps)) {
- GST_ELEMENT_ERROR (pulsesrc, RESOURCE, SETTINGS,
- ("Can't parse caps."), (NULL));
- goto fail;
+ s = gst_caps_get_structure (*caps, 0);
+ gst_structure_get_int (s, "channels", &spec.info.channels);
+ if (!gst_structure_has_field (s, "channel-mask")) {
+ if (spec.info.channels == 1) {
+ pa_channel_map_init_mono (&channel_map);
+ } else if (spec.info.channels == 2) {
+ gst_structure_set (s, "channel-mask", GST_TYPE_BITMASK,
+ GST_AUDIO_CHANNEL_POSITION_MASK (FRONT_LEFT) |
+ GST_AUDIO_CHANNEL_POSITION_MASK (FRONT_RIGHT), NULL);
+ pa_channel_map_init_stereo (&channel_map);
+ } else {
+ need_channel_layout = TRUE;
+ gst_structure_set (s, "channel-mask", GST_TYPE_BITMASK,
+ G_GUINT64_CONSTANT (0), NULL);
+ }
}
+
+ memset (&spec, 0, sizeof (GstAudioRingBufferSpec));
+ spec.latency_time = GST_SECOND;
+ if (!gst_audio_ring_buffer_parse_caps (&spec, *caps))
+ goto invalid_caps;
+
/* Keep the refcount of the caps at 1 to make them writable */
gst_caps_unref (spec.caps);
- if (!gst_pulse_fill_sample_spec (&spec, &pulsesrc->sample_spec)) {
- GST_ELEMENT_ERROR (pulsesrc, RESOURCE, SETTINGS,
- ("Invalid sample specification."), (NULL));
- goto fail;
+ if (!need_channel_layout
+ && !gst_pulse_gst_to_channel_map (&channel_map, &spec)) {
+ need_channel_layout = TRUE;
+ gst_structure_set (s, "channel-mask", GST_TYPE_BITMASK,
+ G_GUINT64_CONSTANT (0), NULL);
+ memset (spec.info.position, 0xff, sizeof (spec.info.position));
}
+ if (!gst_pulse_fill_sample_spec (&spec, &pulsesrc->sample_spec))
+ goto invalid_spec;
+
pa_threaded_mainloop_lock (pulsesrc->mainloop);
- if (!pulsesrc->context) {
- GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED, ("Bad context"), (NULL));
- goto unlock_and_fail;
- }
-
- s = gst_caps_get_structure (caps, 0);
- if (!gst_structure_has_field (s, "channel-layout") ||
- !gst_pulse_gst_to_channel_map (&channel_map, &spec)) {
- if (spec.channels == 1)
- pa_channel_map_init_mono (&channel_map);
- else if (spec.channels == 2)
- pa_channel_map_init_stereo (&channel_map);
- else
- need_channel_layout = TRUE;
- }
+ if (!pulsesrc->context)
+ goto bad_context;
name = "Record Stream";
if (pulsesrc->proplist) {
if (!(pulsesrc->stream = pa_stream_new_with_proplist (pulsesrc->context,
name, &pulsesrc->sample_spec,
(need_channel_layout) ? NULL : &channel_map,
- pulsesrc->proplist))) {
- GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
- ("Failed to create stream: %s",
- pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
- goto unlock_and_fail;
- }
+ pulsesrc->proplist)))
+ goto create_failed;
+
} else if (!(pulsesrc->stream = pa_stream_new (pulsesrc->context,
name, &pulsesrc->sample_spec,
- (need_channel_layout) ? NULL : &channel_map))) {
- GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
- ("Failed to create stream: %s",
- pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
- goto unlock_and_fail;
- }
-
- if (need_channel_layout) {
- const pa_channel_map *m = pa_stream_get_channel_map (pulsesrc->stream);
+ (need_channel_layout) ? NULL : &channel_map)))
+ goto create_failed;
- gst_pulse_channel_map_to_gst (m, &spec);
- caps = spec.caps;
- }
+ m = pa_stream_get_channel_map (pulsesrc->stream);
+ gst_pulse_channel_map_to_gst (m, &spec);
+ gst_audio_channel_positions_to_valid_order (spec.info.position,
+ spec.info.channels);
+ gst_caps_unref (*caps);
+ *caps = gst_audio_info_to_caps (&spec.info);
- GST_DEBUG_OBJECT (pulsesrc, "Caps are %" GST_PTR_FORMAT, caps);
+ GST_DEBUG_OBJECT (pulsesrc, "Caps are %" GST_PTR_FORMAT, *caps);
pa_stream_set_state_callback (pulsesrc->stream, gst_pulsesrc_stream_state_cb,
pulsesrc);
return TRUE;
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, SETTINGS,
+ ("Can't parse caps."), (NULL));
+ goto fail;
+ }
+invalid_spec:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, SETTINGS,
+ ("Invalid sample specification."), (NULL));
+ goto fail;
+ }
+bad_context:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED, ("Bad context"), (NULL));
+ goto unlock_and_fail;
+ }
+create_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("Failed to create stream: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
unlock_and_fail:
- gst_pulsesrc_destroy_stream (pulsesrc);
+ {
+ gst_pulsesrc_destroy_stream (pulsesrc);
- pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
-fail:
- return FALSE;
+ fail:
+ return FALSE;
+ }
}
/* This is essentially gst_base_src_negotiate_default() but the caps
static gboolean
gst_pulsesrc_negotiate (GstBaseSrc * basesrc)
{
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (basesrc);
GstCaps *thiscaps;
GstCaps *caps = NULL;
GstCaps *peercaps = NULL;
gboolean result = FALSE;
/* first see what is possible on our source pad */
- thiscaps = gst_pad_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
+ thiscaps = gst_pad_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
/* nothing or anything is allowed, we're done */
if (thiscaps == NULL || gst_caps_is_any (thiscaps))
goto no_nego_needed;
/* get the peer caps */
- peercaps = gst_pad_peer_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
+ peercaps = gst_pad_peer_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
if (peercaps) {
/* get intersection */
/* now fixate */
if (!gst_caps_is_empty (caps)) {
- gst_pad_fixate_caps (GST_BASE_SRC_PAD (basesrc), caps);
+ GST_BASE_SRC_CLASS (parent_class)->fixate (basesrc, caps);
GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);
if (gst_caps_is_any (caps)) {
result = TRUE;
} else if (gst_caps_is_fixed (caps)) {
/* yay, fixed caps, use those then */
- result = gst_pulsesrc_create_stream (GST_PULSESRC_CAST (basesrc), caps);
+ result = gst_pulsesrc_create_stream (pulsesrc, &caps);
if (result)
- result = gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), caps);
+ result = gst_base_src_set_caps (basesrc, caps);
}
}
gst_caps_unref (caps);
}
static gboolean
-gst_pulsesrc_prepare (GstAudioSrc * asrc, GstRingBufferSpec * spec)
+gst_pulsesrc_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
{
pa_buffer_attr wanted;
const pa_buffer_attr *actual;
GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);
pa_stream_flags_t flags;
-#ifdef HAVE_PULSE_1_0
pa_operation *o;
-#endif
pa_threaded_mainloop_lock (pulsesrc->mainloop);
-#ifdef HAVE_PULSE_1_0
+ {
+ GstAudioRingBufferSpec s = *spec;
+ const pa_channel_map *m;
+
+ m = pa_stream_get_channel_map (pulsesrc->stream);
+ gst_pulse_channel_map_to_gst (m, &s);
+ gst_audio_ring_buffer_set_channel_positions (GST_AUDIO_BASE_SRC
+ (pulsesrc)->ringbuffer, s.info.position);
+ }
+
/* enable event notifications */
GST_LOG_OBJECT (pulsesrc, "subscribing to context events");
if (!(o = pa_context_subscribe (pulsesrc->context,
}
pa_operation_unref (o);
-#endif
wanted.maxlength = -1;
wanted.tlength = -1;
PA_STREAM_NOT_MONOTONIC | PA_STREAM_ADJUST_LATENCY |
PA_STREAM_START_CORKED;
-#ifdef HAVE_PULSE_1_0
if (pulsesrc->mute_set && pulsesrc->mute)
flags |= PA_STREAM_START_MUTED;
-#endif
if (pa_stream_connect_record (pulsesrc->stream, pulsesrc->device, &wanted,
flags) < 0) {
- GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
- ("Failed to connect stream: %s",
- pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
- goto unlock_and_fail;
+ goto connect_failed;
}
pulsesrc->corked = TRUE;
state = pa_stream_get_state (pulsesrc->stream);
- if (!PA_STREAM_IS_GOOD (state)) {
- GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
- ("Failed to connect stream: %s",
- pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
- goto unlock_and_fail;
- }
+ if (!PA_STREAM_IS_GOOD (state))
+ goto stream_is_bad;
if (state == PA_STREAM_READY)
break;
/* Wait until the stream is ready */
pa_threaded_mainloop_wait (pulsesrc->mainloop);
}
+ pulsesrc->stream_connected = TRUE;
/* store the source output index so it can be accessed via a property */
pulsesrc->source_output_idx = pa_stream_get_index (pulsesrc->stream);
g_object_notify (G_OBJECT (pulsesrc), "source-output-index");
-#ifdef HAVE_PULSE_1_0
if (pulsesrc->volume_set) {
gst_pulsesrc_set_stream_volume (pulsesrc, pulsesrc->volume);
pulsesrc->volume_set = FALSE;
}
-#endif
/* get the actual buffering properties now */
actual = pa_stream_get_buffer_attr (pulsesrc->stream);
}
spec->segtotal = actual->maxlength / spec->segsize;
+ if (!pulsesrc->paused) {
+ GST_DEBUG_OBJECT (pulsesrc, "uncorking because we are playing");
+ gst_pulsesrc_set_corked (pulsesrc, FALSE, FALSE);
+ }
pa_threaded_mainloop_unlock (pulsesrc->mainloop);
return TRUE;
+ /* ERRORS */
+connect_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("Failed to connect stream: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+stream_is_bad:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("Failed to connect stream: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
unlock_and_fail:
{
gst_pulsesrc_destroy_stream (pulsesrc);
gboolean res = FALSE;
GST_DEBUG_OBJECT (psrc, "setting corked state to %d", corked);
+ if (!psrc->stream_connected)
+ return TRUE;
+
if (psrc->corked != corked) {
if (!(o = pa_stream_cork (psrc->stream, corked,
gst_pulsesrc_success_cb, psrc)))
GstPulseMixerCtrl *mixer;
GstPulseProbe *probe;
-#ifdef HAVE_PULSE_1_0
gdouble volume;
gboolean volume_set:1;
gboolean mute:1;
gboolean mute_set:1;
gint notify; /* atomic */
-#endif
gboolean corked:1;
+ gboolean stream_connected:1;
gboolean operation_success:1;
gboolean paused:1;
gboolean in_read:1;
#endif
#include "pulseutil.h"
-#include <gst/audio/multichannel.h>
#ifdef HAVE_UNISTD_H
# include <unistd.h> /* getpid on UNIX */
# include <process.h> /* getpid on win32 */
#endif
-static const pa_channel_position_t gst_pos_to_pa[GST_AUDIO_CHANNEL_POSITION_NUM]
- = {
- [GST_AUDIO_CHANNEL_POSITION_FRONT_MONO] = PA_CHANNEL_POSITION_MONO,
- [GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT] = PA_CHANNEL_POSITION_FRONT_LEFT,
- [GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT] = PA_CHANNEL_POSITION_FRONT_RIGHT,
- [GST_AUDIO_CHANNEL_POSITION_REAR_CENTER] = PA_CHANNEL_POSITION_REAR_CENTER,
- [GST_AUDIO_CHANNEL_POSITION_REAR_LEFT] = PA_CHANNEL_POSITION_REAR_LEFT,
- [GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT] = PA_CHANNEL_POSITION_REAR_RIGHT,
- [GST_AUDIO_CHANNEL_POSITION_LFE] = PA_CHANNEL_POSITION_LFE,
- [GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER] = PA_CHANNEL_POSITION_FRONT_CENTER,
- [GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER] =
- PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
- [GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER] =
- PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER,
- [GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT] = PA_CHANNEL_POSITION_SIDE_LEFT,
- [GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT] = PA_CHANNEL_POSITION_SIDE_RIGHT,
- [GST_AUDIO_CHANNEL_POSITION_NONE] = PA_CHANNEL_POSITION_INVALID
+static const struct
+{
+ GstAudioChannelPosition gst_pos;
+ pa_channel_position_t pa_pos;
+} gst_pa_pos_table[] = {
+ {
+ GST_AUDIO_CHANNEL_POSITION_MONO, PA_CHANNEL_POSITION_MONO}, {
+ GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT, PA_CHANNEL_POSITION_FRONT_LEFT}, {
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT, PA_CHANNEL_POSITION_FRONT_RIGHT}, {
+ GST_AUDIO_CHANNEL_POSITION_REAR_CENTER, PA_CHANNEL_POSITION_REAR_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_REAR_LEFT, PA_CHANNEL_POSITION_REAR_LEFT}, {
+ GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT, PA_CHANNEL_POSITION_REAR_RIGHT}, {
+ GST_AUDIO_CHANNEL_POSITION_LFE1, PA_CHANNEL_POSITION_LFE}, {
+ GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER, PA_CHANNEL_POSITION_FRONT_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
+ PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER,
+ PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT, PA_CHANNEL_POSITION_SIDE_LEFT}, {
+ GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT, PA_CHANNEL_POSITION_SIDE_RIGHT}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_CENTER, PA_CHANNEL_POSITION_TOP_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT,
+ PA_CHANNEL_POSITION_TOP_FRONT_LEFT}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT,
+ PA_CHANNEL_POSITION_TOP_FRONT_RIGHT}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER,
+ PA_CHANNEL_POSITION_TOP_FRONT_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT, PA_CHANNEL_POSITION_TOP_REAR_LEFT}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT,
+ PA_CHANNEL_POSITION_TOP_REAR_RIGHT}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER,
+ PA_CHANNEL_POSITION_TOP_REAR_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_NONE, PA_CHANNEL_POSITION_INVALID}
};
-/* All index are increased by one because PA_CHANNEL_POSITION_INVALID == -1 */
-static const GstAudioChannelPosition
- pa_to_gst_pos[GST_AUDIO_CHANNEL_POSITION_NUM]
- = {
- [PA_CHANNEL_POSITION_MONO + 1] = GST_AUDIO_CHANNEL_POSITION_FRONT_MONO,
- [PA_CHANNEL_POSITION_FRONT_LEFT + 1] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
- [PA_CHANNEL_POSITION_FRONT_RIGHT + 1] =
- GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
- [PA_CHANNEL_POSITION_REAR_CENTER + 1] =
- GST_AUDIO_CHANNEL_POSITION_REAR_CENTER,
- [PA_CHANNEL_POSITION_REAR_LEFT + 1] = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
- [PA_CHANNEL_POSITION_REAR_RIGHT + 1] = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
- [PA_CHANNEL_POSITION_LFE + 1] = GST_AUDIO_CHANNEL_POSITION_LFE,
- [PA_CHANNEL_POSITION_FRONT_CENTER + 1] =
- GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- [PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER + 1] =
- GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
- [PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER + 1] =
- GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER,
- [PA_CHANNEL_POSITION_SIDE_LEFT + 1] = GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
- [PA_CHANNEL_POSITION_SIDE_RIGHT + 1] = GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
- [PA_CHANNEL_POSITION_INVALID + 1] = GST_AUDIO_CHANNEL_POSITION_NONE,
-};
+static gboolean
+gstaudioformat_to_pasampleformat (GstAudioFormat format,
+ pa_sample_format_t * sf)
+{
+ switch (format) {
+ case GST_AUDIO_FORMAT_U8:
+ *sf = PA_SAMPLE_U8;
+ break;
+ case GST_AUDIO_FORMAT_S16LE:
+ *sf = PA_SAMPLE_S16LE;
+ break;
+ case GST_AUDIO_FORMAT_S16BE:
+ *sf = PA_SAMPLE_S16BE;
+ break;
+ case GST_AUDIO_FORMAT_F32LE:
+ *sf = PA_SAMPLE_FLOAT32LE;
+ break;
+ case GST_AUDIO_FORMAT_F32BE:
+ *sf = PA_SAMPLE_FLOAT32BE;
+ break;
+ case GST_AUDIO_FORMAT_S32LE:
+ *sf = PA_SAMPLE_S32LE;
+ break;
+ case GST_AUDIO_FORMAT_S32BE:
+ *sf = PA_SAMPLE_S32BE;
+ break;
+ case GST_AUDIO_FORMAT_S24LE:
+ *sf = PA_SAMPLE_S24LE;
+ break;
+ case GST_AUDIO_FORMAT_S24BE:
+ *sf = PA_SAMPLE_S24BE;
+ break;
+ case GST_AUDIO_FORMAT_S24_32LE:
+ *sf = PA_SAMPLE_S24_32LE;
+ break;
+ case GST_AUDIO_FORMAT_S24_32BE:
+ *sf = PA_SAMPLE_S24_32BE;
+ break;
+ default:
+ return FALSE;
+ }
+ return TRUE;
+}
gboolean
-gst_pulse_fill_sample_spec (GstRingBufferSpec * spec, pa_sample_spec * ss)
+gst_pulse_fill_sample_spec (GstAudioRingBufferSpec * spec, pa_sample_spec * ss)
{
-
- if (spec->format == GST_MU_LAW && spec->width == 8)
+ if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW) {
+ if (!gstaudioformat_to_pasampleformat (GST_AUDIO_INFO_FORMAT (&spec->info),
+ &ss->format))
+ return FALSE;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MU_LAW) {
ss->format = PA_SAMPLE_ULAW;
- else if (spec->format == GST_A_LAW && spec->width == 8)
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_A_LAW) {
ss->format = PA_SAMPLE_ALAW;
- else if (spec->format == GST_U8 && spec->width == 8)
- ss->format = PA_SAMPLE_U8;
- else if (spec->format == GST_S16_LE && spec->width == 16)
- ss->format = PA_SAMPLE_S16LE;
- else if (spec->format == GST_S16_BE && spec->width == 16)
- ss->format = PA_SAMPLE_S16BE;
- else if (spec->format == GST_FLOAT32_LE && spec->width == 32)
- ss->format = PA_SAMPLE_FLOAT32LE;
- else if (spec->format == GST_FLOAT32_BE && spec->width == 32)
- ss->format = PA_SAMPLE_FLOAT32BE;
- else if (spec->format == GST_S32_LE && spec->width == 32)
- ss->format = PA_SAMPLE_S32LE;
- else if (spec->format == GST_S32_BE && spec->width == 32)
- ss->format = PA_SAMPLE_S32BE;
- else if (spec->format == GST_S24_3LE && spec->width == 24)
- ss->format = PA_SAMPLE_S24LE;
- else if (spec->format == GST_S24_3BE && spec->width == 24)
- ss->format = PA_SAMPLE_S24BE;
- else if (spec->format == GST_S24_LE && spec->width == 32)
- ss->format = PA_SAMPLE_S24_32LE;
- else if (spec->format == GST_S24_BE && spec->width == 32)
- ss->format = PA_SAMPLE_S24_32BE;
- else
+ } else
return FALSE;
- ss->channels = spec->channels;
- ss->rate = spec->rate;
+ ss->channels = GST_AUDIO_INFO_CHANNELS (&spec->info);
+ ss->rate = GST_AUDIO_INFO_RATE (&spec->info);
if (!pa_sample_spec_valid (ss))
return FALSE;
return TRUE;
}
-#ifdef HAVE_PULSE_1_0
gboolean
-gst_pulse_fill_format_info (GstRingBufferSpec * spec, pa_format_info ** f,
+gst_pulse_fill_format_info (GstAudioRingBufferSpec * spec, pa_format_info ** f,
guint * channels)
{
pa_format_info *format;
pa_sample_format_t sf = PA_SAMPLE_INVALID;
+ GstAudioInfo *ainfo = &spec->info;
format = pa_format_info_new ();
- if (spec->format == GST_MU_LAW && spec->width == 8) {
+ if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MU_LAW
+ && GST_AUDIO_INFO_WIDTH (ainfo) == 8) {
format->encoding = PA_ENCODING_PCM;
sf = PA_SAMPLE_ULAW;
- } else if (spec->format == GST_A_LAW && spec->width == 8) {
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_A_LAW
+ && GST_AUDIO_INFO_WIDTH (ainfo) == 8) {
format->encoding = PA_ENCODING_PCM;
sf = PA_SAMPLE_ALAW;
- } else if (spec->format == GST_U8 && spec->width == 8) {
- format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_U8;
- } else if (spec->format == GST_S16_LE && spec->width == 16) {
- format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_S16LE;
- } else if (spec->format == GST_S16_BE && spec->width == 16) {
- format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_S16BE;
- } else if (spec->format == GST_FLOAT32_LE && spec->width == 32) {
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW) {
format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_FLOAT32LE;
- } else if (spec->format == GST_FLOAT32_BE && spec->width == 32) {
- format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_FLOAT32BE;
- } else if (spec->format == GST_S32_LE && spec->width == 32) {
- format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_S32LE;
- } else if (spec->format == GST_S32_BE && spec->width == 32) {
- format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_S32BE;
- } else if (spec->format == GST_S24_3LE && spec->width == 24) {
- format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_S24LE;
- } else if (spec->format == GST_S24_3BE && spec->width == 24) {
- format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_S24BE;
- } else if (spec->format == GST_S24_LE && spec->width == 32) {
- format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_S24_32LE;
- } else if (spec->format == GST_S24_BE && spec->width == 32) {
- format->encoding = PA_ENCODING_PCM;
- sf = PA_SAMPLE_S24_32BE;
- } else if (spec->format == GST_AC3) {
+ if (!gstaudioformat_to_pasampleformat (GST_AUDIO_INFO_FORMAT (ainfo), &sf))
+ goto fail;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_AC3) {
format->encoding = PA_ENCODING_AC3_IEC61937;
- } else if (spec->format == GST_EAC3) {
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_EAC3) {
format->encoding = PA_ENCODING_EAC3_IEC61937;
- } else if (spec->format == GST_DTS) {
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_DTS) {
format->encoding = PA_ENCODING_DTS_IEC61937;
- } else if (spec->format == GST_MPEG) {
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG) {
format->encoding = PA_ENCODING_MPEG_IEC61937;
} else {
goto fail;
if (format->encoding == PA_ENCODING_PCM) {
pa_format_info_set_sample_format (format, sf);
- pa_format_info_set_channels (format, spec->channels);
+ pa_format_info_set_channels (format, GST_AUDIO_INFO_CHANNELS (ainfo));
}
- pa_format_info_set_rate (format, spec->rate);
+ pa_format_info_set_rate (format, GST_AUDIO_INFO_RATE (ainfo));
if (!pa_format_info_valid (format))
goto fail;
*f = format;
- *channels = spec->channels;
+ *channels = GST_AUDIO_INFO_CHANNELS (ainfo);
return TRUE;
pa_format_info_free (format);
return FALSE;
}
-#endif
/* PATH_MAX is not defined everywhere, e.g. on GNU Hurd */
#ifndef PATH_MAX
pa_channel_map *
gst_pulse_gst_to_channel_map (pa_channel_map * map,
- const GstRingBufferSpec * spec)
+ const GstAudioRingBufferSpec * spec)
{
- int i;
- GstAudioChannelPosition *pos;
+ gint i, j;
+ gint channels;
+ const GstAudioChannelPosition *pos;
pa_channel_map_init (map);
- if (!(pos =
- gst_audio_get_channel_positions (gst_caps_get_structure (spec->caps,
- 0)))) {
- return NULL;
- }
+ channels = GST_AUDIO_INFO_CHANNELS (&spec->info);
+ pos = spec->info.position;
- for (i = 0; i < spec->channels; i++) {
- if (pos[i] == GST_AUDIO_CHANNEL_POSITION_NONE) {
- /* no valid mappings for these channels */
- g_free (pos);
+ for (j = 0; j < channels; j++) {
+ for (i = 0; i < G_N_ELEMENTS (gst_pa_pos_table); i++) {
+ if (pos[j] == gst_pa_pos_table[i].gst_pos) {
+ map->map[j] = gst_pa_pos_table[i].pa_pos;
+ break;
+ }
+ }
+ if (i == G_N_ELEMENTS (gst_pa_pos_table))
return NULL;
- } else if (pos[i] < GST_AUDIO_CHANNEL_POSITION_NUM)
- map->map[i] = gst_pos_to_pa[pos[i]];
- else
- map->map[i] = PA_CHANNEL_POSITION_INVALID;
}
- g_free (pos);
- map->channels = spec->channels;
+ if (j != spec->info.channels) {
+ return NULL;
+ }
+
+ map->channels = spec->info.channels;
if (!pa_channel_map_valid (map)) {
return NULL;
return map;
}
-GstRingBufferSpec *
+GstAudioRingBufferSpec *
gst_pulse_channel_map_to_gst (const pa_channel_map * map,
- GstRingBufferSpec * spec)
+ GstAudioRingBufferSpec * spec)
{
- int i;
- GstAudioChannelPosition *pos;
+ gint i, j;
gboolean invalid = FALSE;
+ gint channels;
+ GstAudioChannelPosition *pos;
- g_return_val_if_fail (map->channels == spec->channels, NULL);
+ channels = GST_AUDIO_INFO_CHANNELS (&spec->info);
- pos = g_new0 (GstAudioChannelPosition, spec->channels + 1);
+ g_return_val_if_fail (map->channels == channels, NULL);
- for (i = 0; i < spec->channels; i++) {
- if (map->map[i] == PA_CHANNEL_POSITION_INVALID) {
- invalid = TRUE;
- break;
- } else if ((int) map->map[i] < (int) GST_AUDIO_CHANNEL_POSITION_NUM) {
- pos[i] = pa_to_gst_pos[map->map[i] + 1];
- } else {
- invalid = TRUE;
- break;
+ pos = spec->info.position;
+
+ for (j = 0; j < channels; j++) {
+ for (i = 0; j < channels && i < G_N_ELEMENTS (gst_pa_pos_table); i++) {
+ if (map->map[j] == gst_pa_pos_table[i].pa_pos) {
+ pos[j] = gst_pa_pos_table[i].gst_pos;
+ break;
+ }
}
+ if (i == G_N_ELEMENTS (gst_pa_pos_table))
+ return NULL;
}
- if (!invalid && !gst_audio_check_channel_positions (pos, spec->channels))
+ if (!invalid
+ && !gst_audio_check_valid_channel_positions (pos, channels, FALSE))
invalid = TRUE;
if (invalid) {
- for (i = 0; i < spec->channels; i++)
+ for (i = 0; i < channels; i++)
pos[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
}
- gst_audio_set_channel_positions (gst_caps_get_structure (spec->caps, 0), pos);
-
- g_free (pos);
-
return spec;
}
#include <gst/gst.h>
#include <pulse/pulseaudio.h>
+#include <gst/audio/gstaudioringbuffer.h>
#include <gst/audio/gstaudiosink.h>
-gboolean gst_pulse_fill_sample_spec (GstRingBufferSpec * spec,
+gboolean gst_pulse_fill_sample_spec (GstAudioRingBufferSpec * spec,
pa_sample_spec * ss);
-#ifdef HAVE_PULSE_1_0
-gboolean gst_pulse_fill_format_info (GstRingBufferSpec * spec,
+gboolean gst_pulse_fill_format_info (GstAudioRingBufferSpec * spec,
pa_format_info ** f, guint * channels);
-#endif
gchar *gst_pulse_client_name (void);
pa_channel_map *gst_pulse_gst_to_channel_map (pa_channel_map * map,
- const GstRingBufferSpec * spec);
+ const GstAudioRingBufferSpec * spec);
-GstRingBufferSpec *gst_pulse_channel_map_to_gst (const pa_channel_map * map,
- GstRingBufferSpec * spec);
+GstAudioRingBufferSpec *gst_pulse_channel_map_to_gst (const pa_channel_map * map,
+ GstAudioRingBufferSpec * spec);
void gst_pulse_cvolume_from_linear (pa_cvolume *v, unsigned channels, gdouble volume);
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class, &src_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
gst_element_class_set_details_simple (element_class,
"Firewire (1394) DV video source", "Source/Video",
error_while_polling:
{
GST_ELEMENT_ERROR (dv1394src, RESOURCE, READ, (NULL), GST_ERROR_SYSTEM);
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
told_to_stop:
{
GST_DEBUG_OBJECT (dv1394src, "told to stop, shutting down");
- return GST_FLOW_WRONG_STATE;
+ return GST_FLOW_FLUSHING;
}
}
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class, &src_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
gst_element_class_set_details_simple (element_class,
"Firewire (1394) HDV video source", "Source/Video",
error_while_polling:
{
GST_ELEMENT_ERROR (dv1394src, RESOURCE, READ, (NULL), GST_ERROR_SYSTEM);
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
told_to_stop:
{
GST_DEBUG_OBJECT (dv1394src, "told to stop, shutting down");
- return GST_FLOW_WRONG_STATE;
+ return GST_FLOW_FLUSHING;
}
}
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class, &sink_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+
gst_element_class_set_details_simple (element_class, "Icecast network sink",
"Sink/Network", "Sends data to an icecast server",
"Wim Taymans <wim.taymans@chello.be>, "
/* class initialization */
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (souphttpclientsink_dbg, "souphttpclientsink", 0, \
- "souphttpclientsink element");
-
-GST_BOILERPLATE_FULL (GstSoupHttpClientSink, gst_soup_http_client_sink,
- GstBaseSink, GST_TYPE_BASE_SINK, DEBUG_INIT);
-
-static void
-gst_soup_http_client_sink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_soup_http_client_sink_sink_template);
-
- gst_element_class_set_details_simple (element_class, "HTTP client sink",
- "Generic", "Sends streams to HTTP server via PUT",
- "David Schleef <ds@entropywave.com>");
-}
+#define gst_soup_http_client_sink_parent_class parent_class
+G_DEFINE_TYPE (GstSoupHttpClientSink, gst_soup_http_client_sink,
+ GST_TYPE_BASE_SINK);
static void
gst_soup_http_client_sink_class_init (GstSoupHttpClientSinkClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstBaseSinkClass *base_sink_class = GST_BASE_SINK_CLASS (klass);
gobject_class->set_property = gst_soup_http_client_sink_set_property;
gobject_class->get_property = gst_soup_http_client_sink_get_property;
gobject_class->dispose = gst_soup_http_client_sink_dispose;
gobject_class->finalize = gst_soup_http_client_sink_finalize;
- base_sink_class->set_caps =
- GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_set_caps);
- if (0)
- base_sink_class->get_times =
- GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_get_times);
- base_sink_class->start = GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_start);
- base_sink_class->stop = GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_stop);
- base_sink_class->unlock =
- GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_unlock);
- base_sink_class->event = GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_event);
- if (0)
- base_sink_class->preroll =
- GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_preroll);
- base_sink_class->render =
- GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_render);
g_object_class_install_property (gobject_class,
PROP_LOCATION,
g_param_spec_boxed ("cookies", "Cookies", "HTTP request cookies",
G_TYPE_STRV, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_soup_http_client_sink_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "HTTP client sink",
+ "Generic", "Sends streams to HTTP server via PUT",
+ "David Schleef <ds@entropywave.com>");
+
+ base_sink_class->set_caps =
+ GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_set_caps);
+ if (0)
+ base_sink_class->get_times =
+ GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_get_times);
+ base_sink_class->start = GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_start);
+ base_sink_class->stop = GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_stop);
+ base_sink_class->unlock =
+ GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_unlock);
+ base_sink_class->event = GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_event);
+ if (0)
+ base_sink_class->preroll =
+ GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_preroll);
+ base_sink_class->render =
+ GST_DEBUG_FUNCPTR (gst_soup_http_client_sink_render);
+
+ GST_DEBUG_CATEGORY_INIT (souphttpclientsink_dbg, "souphttpclientsink", 0,
+ "souphttpclientsink element");
+
}
static void
-gst_soup_http_client_sink_init (GstSoupHttpClientSink * souphttpsink,
- GstSoupHttpClientSinkClass * souphttpsink_class)
+gst_soup_http_client_sink_init (GstSoupHttpClientSink * souphttpsink)
{
const char *proxy;
- souphttpsink->mutex = g_mutex_new ();
- souphttpsink->cond = g_cond_new ();
+ g_mutex_init (&souphttpsink->mutex);
+ g_cond_init (&souphttpsink->cond);
souphttpsink->location = NULL;
souphttpsink->automatic_redirect = TRUE;
{
GstSoupHttpClientSink *souphttpsink = GST_SOUP_HTTP_CLIENT_SINK (object);
- g_mutex_lock (souphttpsink->mutex);
+ g_mutex_lock (&souphttpsink->mutex);
switch (property_id) {
case PROP_SESSION:
if (souphttpsink->prop_session) {
break;
}
done:
- g_mutex_unlock (souphttpsink->mutex);
+ g_mutex_unlock (&souphttpsink->mutex);
}
void
soup_uri_free (souphttpsink->proxy);
g_free (souphttpsink->location);
- g_cond_free (souphttpsink->cond);
- g_mutex_free (souphttpsink->mutex);
+ g_cond_clear (&souphttpsink->cond);
+ g_mutex_clear (&souphttpsink->mutex);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
GST_LOG_OBJECT (souphttpsink, "thread ready");
- g_mutex_lock (souphttpsink->mutex);
- g_cond_signal (souphttpsink->cond);
- g_mutex_unlock (souphttpsink->mutex);
+ g_mutex_lock (&souphttpsink->mutex);
+ g_cond_signal (&souphttpsink->cond);
+ g_mutex_unlock (&souphttpsink->mutex);
return FALSE; /* only run once */
}
souphttpsink->loop = g_main_loop_new (souphttpsink->context, TRUE);
- g_mutex_lock (souphttpsink->mutex);
+ g_mutex_lock (&souphttpsink->mutex);
/* FIXME: error handling */
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- souphttpsink->thread = g_thread_create (thread_func, souphttpsink,
- TRUE, &error);
-#else
souphttpsink->thread = g_thread_try_new ("souphttpclientsink-thread",
thread_func, souphttpsink, &error);
-#endif
GST_LOG_OBJECT (souphttpsink, "waiting for main loop thread to start up");
- g_cond_wait (souphttpsink->cond, souphttpsink->mutex);
- g_mutex_unlock (souphttpsink->mutex);
+ g_cond_wait (&souphttpsink->cond, &souphttpsink->mutex);
+ g_mutex_unlock (&souphttpsink->mutex);
GST_LOG_OBJECT (souphttpsink, "main loop thread running");
souphttpsink->session =
if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
GST_DEBUG_OBJECT (souphttpsink, "got eos");
- g_mutex_lock (souphttpsink->mutex);
+ g_mutex_lock (&souphttpsink->mutex);
while (souphttpsink->message) {
GST_DEBUG_OBJECT (souphttpsink, "waiting");
- g_cond_wait (souphttpsink->cond, souphttpsink->mutex);
+ g_cond_wait (&souphttpsink->cond, &souphttpsink->mutex);
}
- g_mutex_unlock (souphttpsink->mutex);
+ g_mutex_unlock (&souphttpsink->mutex);
GST_DEBUG_OBJECT (souphttpsink, "finished eos");
}
- return TRUE;
+ return GST_BASE_SINK_CLASS (parent_class)->event (sink, event);
}
static GstFlowReturn
if (souphttpsink->offset == 0) {
for (g = souphttpsink->streamheader_buffers; g; g = g_list_next (g)) {
GstBuffer *buffer = g->data;
+ GstMapInfo map;
+
+ /* FIXME, lifetime of the buffer? */
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
soup_message_body_append (souphttpsink->message->request_body,
- SOUP_MEMORY_STATIC, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
- n += GST_BUFFER_SIZE (buffer);
+ SOUP_MEMORY_STATIC, map.data, map.size);
+ n += map.size;
+ gst_buffer_unmap (buffer, &map);
}
}
for (g = souphttpsink->queued_buffers; g; g = g_list_next (g)) {
GstBuffer *buffer = g->data;
- if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_IN_CAPS)) {
+ if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER)) {
+ GstMapInfo map;
+
+ /* FIXME, lifetime of the buffer? */
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
soup_message_body_append (souphttpsink->message->request_body,
- SOUP_MEMORY_STATIC, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
- n += GST_BUFFER_SIZE (buffer);
+ SOUP_MEMORY_STATIC, map.data, map.size);
+ n += map.size;
+ gst_buffer_unmap (buffer, &map);
}
}
static gboolean
send_message (GstSoupHttpClientSink * souphttpsink)
{
- g_mutex_lock (souphttpsink->mutex);
+ g_mutex_lock (&souphttpsink->mutex);
send_message_locked (souphttpsink);
- g_mutex_unlock (souphttpsink->mutex);
+ g_mutex_unlock (&souphttpsink->mutex);
return FALSE;
}
GST_DEBUG_OBJECT (souphttpsink, "callback status=%d %s",
msg->status_code, msg->reason_phrase);
- g_mutex_lock (souphttpsink->mutex);
- g_cond_signal (souphttpsink->cond);
+ g_mutex_lock (&souphttpsink->mutex);
+ g_cond_signal (&souphttpsink->cond);
souphttpsink->message = NULL;
if (!SOUP_STATUS_IS_SUCCESSFUL (msg->status_code)) {
souphttpsink->status_code = msg->status_code;
souphttpsink->reason_phrase = g_strdup (msg->reason_phrase);
- g_mutex_unlock (souphttpsink->mutex);
+ g_mutex_unlock (&souphttpsink->mutex);
return;
}
souphttpsink->sent_buffers = NULL;
send_message_locked (souphttpsink);
- g_mutex_unlock (souphttpsink->mutex);
+ g_mutex_unlock (&souphttpsink->mutex);
}
static GstFlowReturn
return GST_FLOW_ERROR;
}
- g_mutex_lock (souphttpsink->mutex);
+ g_mutex_lock (&souphttpsink->mutex);
if (souphttpsink->location != NULL) {
wake = (souphttpsink->queued_buffers == NULL);
souphttpsink->queued_buffers =
g_source_unref (source);
}
}
- g_mutex_unlock (souphttpsink->mutex);
+ g_mutex_unlock (&souphttpsink->mutex);
return GST_FLOW_OK;
}
{
GstBaseSink base_souphttpsink;
- GMutex *mutex;
- GCond *cond;
+ GMutex mutex;
+ GCond cond;
GMainContext *context;
GMainLoop *loop;
GThread *thread;
PROP_PROXY_ID,
PROP_PROXY_PW,
PROP_COOKIES,
- PROP_IRADIO_MODE,
- PROP_IRADIO_NAME,
- PROP_IRADIO_GENRE,
- PROP_IRADIO_URL,
- PROP_IRADIO_TITLE,
PROP_TIMEOUT,
PROP_EXTRA_HEADERS
};
static gboolean gst_soup_http_src_unlock (GstBaseSrc * bsrc);
static gboolean gst_soup_http_src_unlock_stop (GstBaseSrc * bsrc);
static gboolean gst_soup_http_src_set_location (GstSoupHTTPSrc * src,
- const gchar * uri);
+ const gchar * uri, GError ** error);
static gboolean gst_soup_http_src_set_proxy (GstSoupHTTPSrc * src,
const gchar * uri);
static char *gst_soup_http_src_unicodify (const char *str);
SoupMessage * msg, SoupAuth * auth, gboolean retrying,
GstSoupHTTPSrc * src);
-static void
-_do_init (GType type)
-{
- static const GInterfaceInfo urihandler_info = {
- gst_soup_http_src_uri_handler_init,
- NULL,
- NULL
- };
-
- g_type_add_interface_static (type, GST_TYPE_URI_HANDLER, &urihandler_info);
-
- GST_DEBUG_CATEGORY_INIT (souphttpsrc_debug, "souphttpsrc", 0,
- "SOUP HTTP src");
-}
-
-GST_BOILERPLATE_FULL (GstSoupHTTPSrc, gst_soup_http_src, GstPushSrc,
- GST_TYPE_PUSH_SRC, _do_init);
-
-static void
-gst_soup_http_src_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &srctemplate);
-
- gst_element_class_set_details_simple (element_class, "HTTP client source",
- "Source/Network",
- "Receive data as a client over the network via HTTP using SOUP",
- "Wouter Cloetens <wouter@mind.be>");
-}
+#define gst_soup_http_src_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstSoupHTTPSrc, gst_soup_http_src, GST_TYPE_PUSH_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER,
+ gst_soup_http_src_uri_handler_init));
static void
gst_soup_http_src_class_init (GstSoupHTTPSrcClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseSrcClass *gstbasesrc_class;
GstPushSrcClass *gstpushsrc_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasesrc_class = (GstBaseSrcClass *) klass;
gstpushsrc_class = (GstPushSrcClass *) klass;
"Extra headers to append to the HTTP request",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- /* icecast stuff */
- g_object_class_install_property (gobject_class,
- PROP_IRADIO_MODE,
- g_param_spec_boolean ("iradio-mode",
- "iradio-mode",
- "Enable internet radio mode (extraction of shoutcast/icecast metadata)",
- FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class,
- PROP_IRADIO_NAME,
- g_param_spec_string ("iradio-name",
- "iradio-name", "Name of the stream", NULL,
- G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class,
- PROP_IRADIO_GENRE,
- g_param_spec_string ("iradio-genre",
- "iradio-genre", "Genre of the stream", NULL,
- G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class,
- PROP_IRADIO_URL,
- g_param_spec_string ("iradio-url",
- "iradio-url",
- "Homepage URL for radio stream", NULL,
- G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class,
- PROP_IRADIO_TITLE,
- g_param_spec_string ("iradio-title",
- "iradio-title",
- "Name of currently playing song", NULL,
- G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&srctemplate));
+
+ gst_element_class_set_details_simple (gstelement_class, "HTTP client source",
+ "Source/Network",
+ "Receive data as a client over the network via HTTP using SOUP",
+ "Wouter Cloetens <wouter@mind.be>");
gstbasesrc_class->start = GST_DEBUG_FUNCPTR (gst_soup_http_src_start);
gstbasesrc_class->stop = GST_DEBUG_FUNCPTR (gst_soup_http_src_stop);
gstbasesrc_class->query = GST_DEBUG_FUNCPTR (gst_soup_http_src_query);
gstpushsrc_class->create = GST_DEBUG_FUNCPTR (gst_soup_http_src_create);
+
+ GST_DEBUG_CATEGORY_INIT (souphttpsrc_debug, "souphttpsrc", 0,
+ "SOUP HTTP src");
}
static void
src->iradio_genre = NULL;
g_free (src->iradio_url);
src->iradio_url = NULL;
- g_free (src->iradio_title);
- src->iradio_title = NULL;
}
static void
-gst_soup_http_src_init (GstSoupHTTPSrc * src, GstSoupHTTPSrcClass * g_class)
+gst_soup_http_src_init (GstSoupHTTPSrc * src)
{
const gchar *proxy;
src->proxy_id = NULL;
src->proxy_pw = NULL;
src->cookies = NULL;
- src->iradio_mode = FALSE;
src->loop = NULL;
src->context = NULL;
src->session = NULL;
GST_WARNING ("location property cannot be NULL");
goto done;
}
- if (!gst_soup_http_src_set_location (src, location)) {
+ if (!gst_soup_http_src_set_location (src, location, NULL)) {
GST_WARNING ("badly formatted location");
goto done;
}
g_free (src->user_agent);
src->user_agent = g_value_dup_string (value);
break;
- case PROP_IRADIO_MODE:
- src->iradio_mode = g_value_get_boolean (value);
- break;
case PROP_AUTOMATIC_REDIRECT:
src->automatic_redirect = g_value_get_boolean (value);
break;
case PROP_IS_LIVE:
g_value_set_boolean (value, gst_base_src_is_live (GST_BASE_SRC (src)));
break;
- case PROP_IRADIO_MODE:
- g_value_set_boolean (value, src->iradio_mode);
- break;
- case PROP_IRADIO_NAME:
- g_value_set_string (value, src->iradio_name);
- break;
- case PROP_IRADIO_GENRE:
- g_value_set_string (value, src->iradio_genre);
- break;
- case PROP_IRADIO_URL:
- g_value_set_string (value, src->iradio_url);
- break;
- case PROP_IRADIO_TITLE:
- g_value_set_string (value, src->iradio_title);
- break;
case PROP_USER_ID:
g_value_set_string (value, src->user_id);
break;
GST_DEBUG_OBJECT (src, "size = %" G_GUINT64_FORMAT, src->content_size);
basesrc = GST_BASE_SRC_CAST (src);
- gst_segment_set_duration (&basesrc->segment, GST_FORMAT_BYTES,
- src->content_size);
+ basesrc->segment.duration = src->content_size;
gst_element_post_message (GST_ELEMENT (src),
gst_message_new_duration (GST_OBJECT (src), GST_FORMAT_BYTES,
src->content_size));
}
/* Icecast stuff */
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
if ((value =
soup_message_headers_get (msg->response_headers,
src->src_caps = gst_caps_new_simple ("application/x-icy",
"metadata-interval", G_TYPE_INT, icy_metaint, NULL);
+
+ gst_base_src_set_caps (GST_BASE_SRC (src), src->src_caps);
}
}
if ((value =
if (param != NULL)
rate = atol (param);
- src->src_caps = gst_caps_new_simple ("audio/x-raw-int",
- "channels", G_TYPE_INT, channels,
- "rate", G_TYPE_INT, rate,
- "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16,
- "signed", G_TYPE_BOOLEAN, TRUE,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN, NULL);
+ src->src_caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "S16BE",
+ "layout", G_TYPE_STRING, "interleaved",
+ "channels", G_TYPE_INT, channels, "rate", G_TYPE_INT, rate, NULL);
+
+ gst_base_src_set_caps (GST_BASE_SRC (src), src->src_caps);
} else {
/* Set the Content-Type field on the caps */
- if (src->src_caps)
+ if (src->src_caps) {
+ src->src_caps = gst_caps_make_writable (src->src_caps);
gst_caps_set_simple (src->src_caps, "content-type", G_TYPE_STRING,
value, NULL);
+ gst_base_src_set_caps (GST_BASE_SRC (src), src->src_caps);
+ }
}
}
g_free (src->iradio_name);
src->iradio_name = gst_soup_http_src_unicodify (value);
if (src->iradio_name) {
- g_object_notify (G_OBJECT (src), "iradio-name");
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE, GST_TAG_ORGANIZATION,
src->iradio_name, NULL);
}
g_free (src->iradio_genre);
src->iradio_genre = gst_soup_http_src_unicodify (value);
if (src->iradio_genre) {
- g_object_notify (G_OBJECT (src), "iradio-genre");
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE, GST_TAG_GENRE,
src->iradio_genre, NULL);
}
g_free (src->iradio_url);
src->iradio_url = gst_soup_http_src_unicodify (value);
if (src->iradio_url) {
- g_object_notify (G_OBJECT (src), "iradio-url");
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE, GST_TAG_LOCATION,
src->iradio_url, NULL);
}
if (!gst_tag_list_is_empty (tag_list)) {
GST_DEBUG_OBJECT (src,
"calling gst_element_found_tags with %" GST_PTR_FORMAT, tag_list);
- gst_element_found_tags (GST_ELEMENT_CAST (src), tag_list);
+ gst_pad_push_event (GST_BASE_SRC_PAD (src), gst_event_new_tag (tag_list));
} else {
gst_tag_list_free (tag_list);
}
return;
}
GST_DEBUG_OBJECT (src, "got body");
- src->ret = GST_FLOW_UNEXPECTED;
+ src->ret = GST_FLOW_EOS;
if (src->loop)
g_main_loop_quit (src->loop);
gst_soup_http_src_session_pause_message (src);
return;
}
GST_DEBUG_OBJECT (src, "finished");
- src->ret = GST_FLOW_UNEXPECTED;
+ src->ret = GST_FLOW_EOS;
if (src->session_io_status == GST_SOUP_HTTP_SRC_SESSION_IO_STATUS_CANCELLED) {
/* gst_soup_http_src_cancel_message() triggered this; probably a seek
* that occurred in the QUEUEING state; i.e. before the connection setup
* refcount to 0, freeing it.
*/
+typedef struct
+{
+ GstBuffer *buffer;
+ GstMapInfo map;
+} SoupGstChunk;
+
static void
-gst_soup_http_src_chunk_free (gpointer gstbuf)
+gst_soup_http_src_chunk_free (gpointer user_data)
{
- gst_buffer_unref (GST_BUFFER_CAST (gstbuf));
+ SoupGstChunk *chunk = (SoupGstChunk *) user_data;
+
+ gst_buffer_unmap (chunk->buffer, &chunk->map);
+ gst_buffer_unref (chunk->buffer);
+ g_slice_free (SoupGstChunk, chunk);
}
static SoupBuffer *
SoupBuffer *soupbuf;
gsize length;
GstFlowReturn rc;
+ SoupGstChunk *chunk;
if (max_len)
length = MIN (basesrc->blocksize, max_len);
GST_DEBUG_OBJECT (src, "alloc %" G_GSIZE_FORMAT " bytes <= %" G_GSIZE_FORMAT,
length, max_len);
-
- rc = gst_pad_alloc_buffer (GST_BASE_SRC_PAD (basesrc),
- GST_BUFFER_OFFSET_NONE, length,
- src->src_caps ? src->src_caps :
- GST_PAD_CAPS (GST_BASE_SRC_PAD (basesrc)), &gstbuf);
+ rc = GST_BASE_SRC_CLASS (parent_class)->alloc (basesrc, -1, length, &gstbuf);
if (G_UNLIKELY (rc != GST_FLOW_OK)) {
/* Failed to allocate buffer. Stall SoupSession and return error code
* to create(). */
return NULL;
}
- soupbuf = soup_buffer_new_with_owner (GST_BUFFER_DATA (gstbuf), length,
- gstbuf, gst_soup_http_src_chunk_free);
+ chunk = g_slice_new0 (SoupGstChunk);
+ chunk->buffer = gstbuf;
+ gst_buffer_map (gstbuf, &chunk->map, GST_MAP_READWRITE);
+
+ soupbuf = soup_buffer_new_with_owner (chunk->map.data, chunk->map.size,
+ chunk, gst_soup_http_src_chunk_free);
return soupbuf;
}
{
GstBaseSrc *basesrc;
guint64 new_position;
+ SoupGstChunk *gchunk;
if (G_UNLIKELY (msg != src->msg)) {
GST_DEBUG_OBJECT (src, "got chunk, but not for current message");
chunk->length);
/* Extract the GstBuffer from the SoupBuffer and set its fields. */
- *src->outbuf = GST_BUFFER_CAST (soup_buffer_get_owner (chunk));
-
- GST_BUFFER_SIZE (*src->outbuf) = chunk->length;
- GST_BUFFER_OFFSET (*src->outbuf) = basesrc->segment.last_stop;
+ gchunk = (SoupGstChunk *) soup_buffer_get_owner (chunk);
+ *src->outbuf = gchunk->buffer;
- gst_buffer_set_caps (*src->outbuf,
- (src->src_caps) ? src->src_caps :
- GST_PAD_CAPS (GST_BASE_SRC_PAD (basesrc)));
+ gst_buffer_resize (*src->outbuf, 0, chunk->length);
+ GST_BUFFER_OFFSET (*src->outbuf) = basesrc->segment.position;
gst_buffer_ref (*src->outbuf);
src->session_io_status = GST_SOUP_HTTP_SRC_SESSION_IO_STATUS_IDLE;
soup_message_headers_append (src->msg->request_headers, "Connection",
"close");
- if (src->iradio_mode) {
- soup_message_headers_append (src->msg->request_headers, "icy-metadata",
- "1");
- }
+ soup_message_headers_append (src->msg->request_headers, "icy-metadata", "1");
+
if (src->cookies) {
gchar **cookie;
if (src->msg && (src->request_position != src->read_position)) {
if (src->content_size != 0 && src->request_position >= src->content_size) {
GST_WARNING_OBJECT (src, "Seeking behind the end of file -- EOS");
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
} else if (src->session_io_status ==
GST_SOUP_HTTP_SRC_SESSION_IO_STATUS_IDLE) {
gst_soup_http_src_add_range_header (src, src->request_position);
} while (src->ret == GST_FLOW_CUSTOM_ERROR);
if (src->ret == GST_FLOW_CUSTOM_ERROR)
- src->ret = GST_FLOW_UNEXPECTED;
+ src->ret = GST_FLOW_EOS;
return src->ret;
}
}
static gboolean
-gst_soup_http_src_set_location (GstSoupHTTPSrc * src, const gchar * uri)
+gst_soup_http_src_set_location (GstSoupHTTPSrc * src, const gchar * uri,
+ GError ** error)
{
if (src->location) {
g_free (src->location);
}
static guint
-gst_soup_http_src_uri_get_type (void)
+gst_soup_http_src_uri_get_type (GType type)
{
return GST_URI_SRC;
}
-static gchar **
-gst_soup_http_src_uri_get_protocols (void)
+static const gchar *const *
+gst_soup_http_src_uri_get_protocols (GType type)
{
static const gchar *protocols[] = { "http", "https", NULL };
- return (gchar **) protocols;
+
+ return protocols;
}
-static const gchar *
+static gchar *
gst_soup_http_src_uri_get_uri (GstURIHandler * handler)
{
GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (handler);
- return src->location;
+ /* FIXME: make thread-safe */
+ return g_strdup (src->location);
}
static gboolean
-gst_soup_http_src_uri_set_uri (GstURIHandler * handler, const gchar * uri)
+gst_soup_http_src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
{
GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (handler);
- return gst_soup_http_src_set_location (src, uri);
+ return gst_soup_http_src_set_location (src, uri, error);
}
static void
guint64 request_position; /* Seek to this position. */
/* Shoutcast/icecast metadata extraction handling. */
- gboolean iradio_mode;
GstCaps *src_caps;
gchar *iradio_name;
gchar *iradio_genre;
gchar *iradio_url;
- gchar *iradio_title;
GstStructure *extra_headers;
#include <stdlib.h>
#include <string.h>
#include <gst/tag/tag.h>
+#include <gst/audio/audio.h>
GST_DEBUG_CATEGORY_STATIC (speexdec_debug);
#define GST_CAT_DEFAULT speexdec_debug
ARG_ENH
};
+#define FORMAT_STR GST_AUDIO_NE(S16)
+
static GstStaticPadTemplate speex_dec_src_factory =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "rate = (int) [ 6000, 48000 ], "
- "channels = (int) [ 1, 2 ], "
- "endianness = (int) BYTE_ORDER, "
- "signed = (boolean) true, " "width = (int) 16, " "depth = (int) 16")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMAT_STR ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 6000, 48000 ], " "channels = (int) [ 1, 2 ]")
);
static GstStaticPadTemplate speex_dec_sink_factory =
GST_STATIC_CAPS ("audio/x-speex")
);
-GST_BOILERPLATE (GstSpeexDec, gst_speex_dec, GstAudioDecoder,
- GST_TYPE_AUDIO_DECODER);
-
+#define gst_speex_dec_parent_class parent_class
+G_DEFINE_TYPE (GstSpeexDec, gst_speex_dec, GST_TYPE_AUDIO_DECODER);
static gboolean gst_speex_dec_start (GstAudioDecoder * dec);
static gboolean gst_speex_dec_stop (GstAudioDecoder * dec);
const GValue * value, GParamSpec * pspec);
static void
-gst_speex_dec_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &speex_dec_src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &speex_dec_sink_factory);
- gst_element_class_set_details_simple (element_class, "Speex audio decoder",
- "Codec/Decoder/Audio",
- "decode speex streams to audio", "Wim Taymans <wim@fluendo.com>");
-}
-
-static void
gst_speex_dec_class_init (GstSpeexDecClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstAudioDecoderClass *base_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
base_class = (GstAudioDecoderClass *) klass;
gobject_class->set_property = gst_speex_dec_set_property;
g_param_spec_boolean ("enh", "Enh", "Enable perceptual enhancement",
DEFAULT_ENH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&speex_dec_src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&speex_dec_sink_factory));
+ gst_element_class_set_details_simple (gstelement_class, "Speex audio decoder",
+ "Codec/Decoder/Audio",
+ "decode speex streams to audio", "Wim Taymans <wim@fluendo.com>");
+
GST_DEBUG_CATEGORY_INIT (speexdec_debug, "speexdec", 0,
"speex decoding element");
}
}
static void
-gst_speex_dec_init (GstSpeexDec * dec, GstSpeexDecClass * g_class)
+gst_speex_dec_init (GstSpeexDec * dec)
{
dec->enh = DEFAULT_ENH;
static GstFlowReturn
gst_speex_dec_parse_header (GstSpeexDec * dec, GstBuffer * buf)
{
- GstCaps *caps;
+ GstMapInfo map;
+ GstAudioInfo info;
+ static const GstAudioChannelPosition chan_pos[2][2] = {
+ {GST_AUDIO_CHANNEL_POSITION_MONO},
+ {GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}
+ };
/* get the header */
- dec->header = speex_packet_to_header ((char *) GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ dec->header = speex_packet_to_header ((gchar *) map.data, map.size);
+ gst_buffer_unmap (buf, &map);
if (!dec->header)
goto no_header;
speex_bits_init (&dec->bits);
/* set caps */
- caps = gst_caps_new_simple ("audio/x-raw-int",
- "rate", G_TYPE_INT, dec->header->rate,
- "channels", G_TYPE_INT, dec->header->nb_channels,
- "signed", G_TYPE_BOOLEAN, TRUE,
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16, NULL);
-
- if (!gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), caps))
+ gst_audio_info_init (&info);
+ gst_audio_info_set_format (&info,
+ GST_AUDIO_FORMAT_S16,
+ dec->header->rate,
+ dec->header->nb_channels, chan_pos[dec->header->nb_channels - 1]);
+
+ if (!gst_audio_decoder_set_output_format (GST_AUDIO_DECODER (dec), &info))
goto nego_failed;
- gst_caps_unref (caps);
return GST_FLOW_OK;
/* ERRORS */
{
GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
(NULL), ("couldn't negotiate format"));
- gst_caps_unref (caps);
return GST_FLOW_NOT_NEGOTIATED;
}
}
if (!list) {
GST_WARNING_OBJECT (dec, "couldn't decode comments");
- list = gst_tag_list_new ();
+ list = gst_tag_list_new_empty ();
}
if (encoder) {
GST_INFO_OBJECT (dec, "tags: %" GST_PTR_FORMAT, list);
- gst_element_found_tags_for_pad (GST_ELEMENT (dec),
- GST_AUDIO_DECODER_SRC_PAD (dec), list);
+ gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (dec),
+ gst_event_new_tag (list));
g_free (encoder);
g_free (ver);
{
GstFlowReturn res = GST_FLOW_OK;
gint i, fpp;
- guint size;
- guint8 *data;
SpeexBits *bits;
+ GstMapInfo map;
if (!dec->frame_duration)
goto not_negotiated;
- if (G_LIKELY (GST_BUFFER_SIZE (buf))) {
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
-
+ if (G_LIKELY (gst_buffer_get_size (buf))) {
/* send data to the bitstream */
- speex_bits_read_from (&dec->bits, (char *) data, size);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ speex_bits_read_from (&dec->bits, (gchar *) map.data, map.size);
+ gst_buffer_unmap (buf, &map);
fpp = dec->header->frames_per_packet;
bits = &dec->bits;
- GST_DEBUG_OBJECT (dec, "received buffer of size %u, fpp %d, %d bits",
- size, fpp, speex_bits_remaining (bits));
+ GST_DEBUG_OBJECT (dec, "received buffer of size %" G_GSIZE_FORMAT
+ ", fpp %d, %d bits", map.size, fpp, speex_bits_remaining (bits));
} else {
/* FIXME ? actually consider how much concealment is needed */
/* concealment data, pass NULL as the bits parameters */
/* now decode each frame, catering for unknown number of them (e.g. rtp) */
for (i = 0; i < fpp; i++) {
GstBuffer *outbuf;
- gint16 *out_data;
gint ret;
GST_LOG_OBJECT (dec, "decoding frame %d/%d, %d bits remaining", i, fpp,
bits ? speex_bits_remaining (bits) : -1);
-
+#if 0
res =
gst_pad_alloc_buffer_and_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec),
GST_BUFFER_OFFSET_NONE, dec->frame_size * dec->header->nb_channels * 2,
GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));
return res;
}
+#endif
+ /* FIXME, we can use a bufferpool because we have fixed size buffers. We
+ * could also use an allocator */
+ outbuf =
+ gst_buffer_new_allocate (NULL,
+ dec->frame_size * dec->header->nb_channels * 2, 0);
- out_data = (gint16 *) GST_BUFFER_DATA (outbuf);
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ ret = speex_decode_int (dec->state, bits, (spx_int16_t *) map.data);
- ret = speex_decode_int (dec->state, bits, out_data);
if (ret == -1) {
/* uh? end of stream */
if (fpp == 0 && speex_bits_remaining (bits) < 8) {
gst_buffer_unref (outbuf);
}
if (dec->header->nb_channels == 2)
- speex_decode_stereo_int (out_data, dec->frame_size, dec->stereo);
+ speex_decode_stereo_int ((spx_int16_t *) map.data, dec->frame_size,
+ dec->stereo);
+
+ gst_buffer_unmap (outbuf, &map);
res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1);
}
}
+static gboolean
+memcmp_buffers (GstBuffer * buf1, GstBuffer * buf2)
+{
+ GstMapInfo map;
+ gsize size1, size2;
+ gboolean res;
+
+ size1 = gst_buffer_get_size (buf1);
+ size2 = gst_buffer_get_size (buf2);
+
+ if (size1 != size2)
+ return FALSE;
+
+ gst_buffer_map (buf1, &map, GST_MAP_READ);
+ res = gst_buffer_memcmp (buf2, 0, map.data, map.size) == 0;
+ gst_buffer_unmap (buf1, &map);
+
+ return res;
+}
+
static GstFlowReturn
gst_speex_dec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buf)
{
/* If we have the streamheader and vorbiscomment from the caps already
* ignore them here */
if (dec->streamheader && dec->vorbiscomment) {
- if (GST_BUFFER_SIZE (dec->streamheader) == GST_BUFFER_SIZE (buf)
- && memcmp (GST_BUFFER_DATA (dec->streamheader), GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf)) == 0) {
+ if (memcmp_buffers (dec->streamheader, buf)) {
GST_DEBUG_OBJECT (dec, "found streamheader");
gst_audio_decoder_finish_frame (bdec, NULL, 1);
res = GST_FLOW_OK;
- } else if (GST_BUFFER_SIZE (dec->vorbiscomment) == GST_BUFFER_SIZE (buf)
- && memcmp (GST_BUFFER_DATA (dec->vorbiscomment), GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf)) == 0) {
+ } else if (memcmp_buffers (dec->vorbiscomment, buf)) {
GST_DEBUG_OBJECT (dec, "found vorbiscomments");
gst_audio_decoder_finish_frame (bdec, NULL, 1);
res = GST_FLOW_OK;
GST_DEBUG_CATEGORY_STATIC (speexenc_debug);
#define GST_CAT_DEFAULT speexenc_debug
+#define FORMAT_STR GST_AUDIO_NE(S16)
+
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMAT_STR ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 6000, 48000 ], "
+ "channels = (int) 1; "
+ "audio/x-raw, "
+ "format = (string) " FORMAT_STR ", "
+ "layout = (string) interleaved, "
"rate = (int) [ 6000, 48000 ], "
- "channels = (int) [ 1, 2 ], "
- "endianness = (int) BYTE_ORDER, "
- "signed = (boolean) TRUE, " "width = (int) 16, " "depth = (int) 16")
+ "channels = (int) 2, " "channel-mask = (bitmask) 0x3")
);
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
static GstFlowReturn
gst_speex_enc_pre_push (GstAudioEncoder * benc, GstBuffer ** buffer);
-static void
-gst_speex_enc_setup_interfaces (GType speexenc_type)
-{
- static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };
-
- g_type_add_interface_static (speexenc_type, GST_TYPE_TAG_SETTER,
- &tag_setter_info);
-
- GST_DEBUG_CATEGORY_INIT (speexenc_debug, "speexenc", 0, "Speex encoder");
-}
-
-GST_BOILERPLATE_FULL (GstSpeexEnc, gst_speex_enc, GstAudioEncoder,
- GST_TYPE_AUDIO_ENCODER, gst_speex_enc_setup_interfaces);
-
-static void
-gst_speex_enc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
- gst_element_class_set_details_simple (element_class, "Speex audio encoder",
- "Codec/Encoder/Audio",
- "Encodes audio in Speex format", "Wim Taymans <wim@fluendo.com>");
-}
+#define gst_speex_enc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstSpeexEnc, gst_speex_enc, GST_TYPE_AUDIO_ENCODER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL));
static void
gst_speex_enc_class_init (GstSpeexEncClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstAudioEncoderClass *base_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
base_class = (GstAudioEncoderClass *) klass;
+ gobject_class->finalize = gst_speex_enc_finalize;
gobject_class->set_property = gst_speex_enc_set_property;
gobject_class->get_property = gst_speex_enc_get_property;
"The last status message", NULL,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- gobject_class->finalize = gst_speex_enc_finalize;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+ gst_element_class_set_details_simple (gstelement_class, "Speex audio encoder",
+ "Codec/Encoder/Audio",
+ "Encodes audio in Speex format", "Wim Taymans <wim@fluendo.com>");
+
+ GST_DEBUG_CATEGORY_INIT (speexenc_debug, "speexenc", 0, "Speex encoder");
}
static void
}
static void
-gst_speex_enc_init (GstSpeexEnc * enc, GstSpeexEncClass * klass)
+gst_speex_enc_init (GstSpeexEnc * enc)
{
GstAudioEncoder *benc = GST_AUDIO_ENCODER (enc);
GST_DEBUG_OBJECT (enc, "start");
speex_bits_init (&enc->bits);
- enc->tags = gst_tag_list_new ();
+ enc->tags = gst_tag_list_new_empty ();
enc->header_sent = FALSE;
return TRUE;
gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (enc)));
if (merged_tags == NULL)
- merged_tags = gst_tag_list_new ();
+ merged_tags = gst_tag_list_new_empty ();
GST_DEBUG_OBJECT (enc, "merged tags = %" GST_PTR_FORMAT, merged_tags);
comments = gst_tag_list_to_vorbiscomment_buffer (merged_tags, NULL,
static GstFlowReturn
gst_speex_enc_push_buffer (GstSpeexEnc * enc, GstBuffer * buffer)
{
- guint size;
-
- size = GST_BUFFER_SIZE (buffer);
- GST_DEBUG_OBJECT (enc, "pushing output buffer of size %u", size);
+ GST_DEBUG_OBJECT (enc, "pushing output buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buffer));
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (GST_AUDIO_ENCODER_SRC_PAD (enc)));
return gst_pad_push (GST_AUDIO_ENCODER_SRC_PAD (enc), buffer);
}
gst_speex_enc_encode (GstSpeexEnc * enc, GstBuffer * buf)
{
gint frame_size = enc->frame_size;
- gint bytes = frame_size * 2 * enc->channels, samples, size;
+ gint bytes = frame_size * 2 * enc->channels, samples;
gint outsize, written, dtx_ret = 0;
- guint8 *data, *data0 = NULL;
+ GstMapInfo map;
+ guint8 *data, *data0 = NULL, *bdata;
+ gsize bsize, size;
GstBuffer *outbuf;
GstFlowReturn ret = GST_FLOW_OK;
if (G_LIKELY (buf)) {
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ bdata = map.data;
+ bsize = map.size;
- if (G_UNLIKELY (size % bytes)) {
+ if (G_UNLIKELY (bsize % bytes)) {
GST_DEBUG_OBJECT (enc, "draining; adding silence samples");
- size = ((size / bytes) + 1) * bytes;
+
+ size = ((bsize / bytes) + 1) * bytes;
data0 = data = g_malloc0 (size);
- memcpy (data, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ memcpy (data, bdata, bsize);
+ gst_buffer_unmap (buf, &map);
+ bdata = NULL;
+ } else {
+ data = bdata;
+ size = bsize;
}
} else {
GST_DEBUG_OBJECT (enc, "nothing to drain");
speex_bits_insert_terminator (&enc->bits);
outsize = speex_bits_nbytes (&enc->bits);
+ if (bdata)
+ gst_buffer_unmap (buf, &map);
+
+#if 0
ret = gst_pad_alloc_buffer_and_set_caps (GST_AUDIO_ENCODER_SRC_PAD (enc),
GST_BUFFER_OFFSET_NONE, outsize,
GST_PAD_CAPS (GST_AUDIO_ENCODER_SRC_PAD (enc)), &outbuf);
if ((GST_FLOW_OK != ret))
goto done;
+#endif
+ outbuf = gst_buffer_new_allocate (NULL, outsize, 0);
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
- written = speex_bits_write (&enc->bits,
- (gchar *) GST_BUFFER_DATA (outbuf), outsize);
+ written = speex_bits_write (&enc->bits, (gchar *) map.data, outsize);
if (G_UNLIKELY (written < outsize)) {
GST_ERROR_OBJECT (enc, "short write: %d < %d bytes", written, outsize);
- GST_BUFFER_SIZE (outbuf) = written;
} else if (G_UNLIKELY (written > outsize)) {
GST_ERROR_OBJECT (enc, "overrun: %d > %d bytes", written, outsize);
+ written = outsize;
}
+ gst_buffer_unmap (outbuf, &map);
+ gst_buffer_resize (outbuf, 0, written);
if (!dtx_ret)
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
va_start (va, buf);
/* put buffers in a fixed list */
while (buf) {
- g_assert (gst_buffer_is_metadata_writable (buf));
+ g_assert (gst_buffer_is_writable (buf));
/* mark buffer */
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
g_value_init (&value, GST_TYPE_BUFFER);
buf = gst_buffer_copy (buf);
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
gst_value_set_buffer (&value, buf);
gst_buffer_unref (buf);
gst_value_array_append_value (&array, &value);
/* create header buffer */
data = (guint8 *) speex_header_to_packet (&enc->header, &data_len);
- buf1 = gst_buffer_new ();
- GST_BUFFER_DATA (buf1) = GST_BUFFER_MALLOCDATA (buf1) = data;
- GST_BUFFER_SIZE (buf1) = data_len;
+ buf1 = gst_buffer_new_wrapped (data, data_len);
GST_BUFFER_OFFSET_END (buf1) = 0;
GST_BUFFER_OFFSET (buf1) = 0;
/* negotiate with these caps */
GST_DEBUG_OBJECT (enc, "here are the caps: %" GST_PTR_FORMAT, caps);
- gst_buffer_set_caps (buf1, caps);
- gst_buffer_set_caps (buf2, caps);
- gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (enc), caps);
+ gst_audio_encoder_set_output_format (GST_AUDIO_ENCODER (enc), caps);
gst_caps_unref (caps);
/* push out buffers */
enc->header_sent = TRUE;
}
- GST_DEBUG_OBJECT (enc, "received buffer %p of %u bytes", buf,
- buf ? GST_BUFFER_SIZE (buf) : 0);
+ GST_DEBUG_OBJECT (enc, "received buffer %p of %" G_GSIZE_FORMAT " bytes", buf,
+ buf ? gst_buffer_get_size (buf) : 0);
ret = gst_speex_enc_encode (enc, buf);
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("ANY"));
-GST_BOILERPLATE (GstApev2Mux, gst_apev2_mux, GstTagMux,
- GST_TYPE_TAG_MUX);
+G_DEFINE_TYPE (GstApev2Mux, gst_apev2_mux, GST_TYPE_TAG_MUX);
static GstBuffer *gst_apev2_mux_render_tag (GstTagMux * mux,
const GstTagList * taglist);
const GstTagList * taglist);
static void
-gst_apev2_mux_base_init (gpointer g_class)
+gst_apev2_mux_class_init (GstApev2MuxClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ GST_TAG_MUX_CLASS (klass)->render_start_tag =
+ GST_DEBUG_FUNCPTR (gst_apev2_mux_render_tag);
+ GST_TAG_MUX_CLASS (klass)->render_end_tag =
+ GST_DEBUG_FUNCPTR (gst_apev2_mux_render_end_tag);
- gst_element_class_add_static_pad_template (element_class, &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
gst_element_class_set_details_simple (element_class,
"TagLib-based APEv2 Muxer", "Formatter/Metadata",
}
static void
-gst_apev2_mux_class_init (GstApev2MuxClass * klass)
-{
- GST_TAG_MUX_CLASS (klass)->render_start_tag =
- GST_DEBUG_FUNCPTR (gst_apev2_mux_render_tag);
- GST_TAG_MUX_CLASS (klass)->render_end_tag =
- GST_DEBUG_FUNCPTR (gst_apev2_mux_render_end_tag);
-}
-
-static void
-gst_apev2_mux_init (GstApev2Mux * apev2mux, GstApev2MuxClass * apev2mux_class)
+gst_apev2_mux_init (GstApev2Mux * apev2mux)
{
/* nothing to do */
}
APE::Tag apev2tag;
ByteVector rendered_tag;
GstBuffer *buf;
- GstCaps *caps;
guint tag_size;
/* Render the tag */
/* Create buffer with tag */
buf = gst_buffer_new_and_alloc (tag_size);
- memcpy (GST_BUFFER_DATA (buf), rendered_tag.data (), tag_size);
-
- caps = gst_static_pad_template_get_caps (&src_template);
- gst_buffer_set_caps (buf, caps);
- gst_caps_unref (caps);
+ gst_buffer_fill (buf, 0, rendered_tag.data (), tag_size);
return buf;
}
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("ANY"));
-GST_BOILERPLATE (GstId3v2Mux, gst_id3v2_mux, GstTagMux, GST_TYPE_TAG_MUX);
+G_DEFINE_TYPE (GstId3v2Mux, gst_id3v2_mux, GST_TYPE_TAG_MUX);
static GstBuffer *gst_id3v2_mux_render_tag (GstTagMux * mux,
const GstTagList * taglist);
const GstTagList * taglist);
static void
-gst_id3v2_mux_base_init (gpointer g_class)
+gst_id3v2_mux_class_init (GstId3v2MuxClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ GST_TAG_MUX_CLASS (klass)->render_start_tag =
+ GST_DEBUG_FUNCPTR (gst_id3v2_mux_render_tag);
+ GST_TAG_MUX_CLASS (klass)->render_end_tag =
+ GST_DEBUG_FUNCPTR (gst_id3v2_mux_render_end_tag);
- gst_element_class_add_static_pad_template (element_class, &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
gst_element_class_set_details_simple (element_class,
"TagLib-based ID3v2 Muxer", "Formatter/Metadata",
}
static void
-gst_id3v2_mux_class_init (GstId3v2MuxClass * klass)
-{
- GST_TAG_MUX_CLASS (klass)->render_start_tag =
- GST_DEBUG_FUNCPTR (gst_id3v2_mux_render_tag);
- GST_TAG_MUX_CLASS (klass)->render_end_tag =
- GST_DEBUG_FUNCPTR (gst_id3v2_mux_render_end_tag);
-}
-
-static void
-gst_id3v2_mux_init (GstId3v2Mux * id3v2mux, GstId3v2MuxClass * id3v2mux_class)
+gst_id3v2_mux_init (GstId3v2Mux * id3v2mux)
{
/* nothing to do */
}
ID3v2::Frame * frame;
const GValue *val;
GstBuffer *buf;
+ GstSample *sample;
val = gst_tag_list_get_value_index (list, tag, i);
- buf = (GstBuffer *) gst_value_get_mini_object (val);
+ sample = (GstSample *) g_value_get_boxed (val);
- if (buf && GST_BUFFER_CAPS (buf)) {
+ if (sample && (buf = gst_sample_get_buffer (sample)) &&
+ gst_sample_get_caps (sample)) {
GstStructure *s;
gint version = 0;
- s = gst_caps_get_structure (GST_BUFFER_CAPS (buf), 0);
+ s = gst_caps_get_structure (gst_sample_get_caps (sample), 0);
if (s && gst_structure_get_int (s, "version", &version) && version > 0) {
- ByteVector bytes ((char *) GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ GstMapInfo map;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
GST_DEBUG ("Injecting ID3v2.%u frame %u/%u of length %u and type %"
- GST_PTR_FORMAT, version, i, num_tags, GST_BUFFER_SIZE (buf), s);
+ GST_PTR_FORMAT, version, i, num_tags, map.size, s);
- frame = factory->createFrame (bytes, (TagLib::uint) version);
+ frame = factory->createFrame (ByteVector ((const char *) map.data,
+ map.size), (TagLib::uint) version);
if (frame)
id3v2tag->addFrame (frame);
+
+ gst_buffer_unmap (buf, &map);
}
}
}
for (n = 0; n < num_tags; ++n) {
const GValue *val;
+ GstSample *sample;
GstBuffer *image;
GST_DEBUG ("image %u/%u", n + 1, num_tags);
val = gst_tag_list_get_value_index (list, tag, n);
- image = (GstBuffer *) gst_value_get_mini_object (val);
+ sample = (GstSample *) g_value_get_boxed (val);
- if (GST_IS_BUFFER (image) && GST_BUFFER_SIZE (image) > 0 &&
- GST_BUFFER_CAPS (image) != NULL &&
- !gst_caps_is_empty (GST_BUFFER_CAPS (image))) {
+ if (GST_IS_SAMPLE (image) && (image = gst_sample_get_buffer (sample)) &&
+ GST_IS_BUFFER (image) && gst_buffer_get_size (image) > 0 &&
+ gst_sample_get_caps (sample) != NULL &&
+ !gst_caps_is_empty (gst_sample_get_caps (sample))) {
const gchar *mime_type;
GstStructure *s;
- s = gst_caps_get_structure (GST_BUFFER_CAPS (image), 0);
+ s = gst_caps_get_structure (gst_sample_get_caps (sample), 0);
mime_type = gst_structure_get_name (s);
if (mime_type != NULL) {
ID3v2::AttachedPictureFrame * frame;
const gchar *desc;
+ GstMapInfo map;
if (strcmp (mime_type, "text/uri-list") == 0)
mime_type = "-->";
frame = new ID3v2::AttachedPictureFrame ();
+ gst_buffer_map (image, &map, GST_MAP_READ);
+
GST_DEBUG ("Attaching picture of %u bytes and mime type %s",
- GST_BUFFER_SIZE (image), mime_type);
+ map.size, mime_type);
id3v2tag->addFrame (frame);
- frame->setPicture (ByteVector ((const char *) GST_BUFFER_DATA (image),
- GST_BUFFER_SIZE (image)));
+ frame->setPicture (ByteVector ((const char *) map.data, map.size));
frame->setTextEncoding (String::UTF8);
frame->setMimeType (mime_type);
+ gst_buffer_unmap (image, &map);
+
desc = gst_structure_get_string (s, "image-description");
frame->setDescription ((desc) ? desc : "");
}
}
} else {
- GST_WARNING ("NULL image or no caps on image buffer (%p, caps=%"
- GST_PTR_FORMAT ")", image, (image) ? GST_BUFFER_CAPS (image) : NULL);
+ GST_WARNING ("NULL image or no caps on image sample (%p, caps=%"
+ GST_PTR_FORMAT ")", sample,
+ (sample) ? gst_sample_get_caps (sample) : NULL);
}
}
}
ID3v2::Tag id3v2tag;
ByteVector rendered_tag;
GstBuffer *buf;
- GstCaps *caps;
guint tag_size;
/* write all strings as UTF-8 by default */
/* Create buffer with tag */
buf = gst_buffer_new_and_alloc (tag_size);
- memcpy (GST_BUFFER_DATA (buf), rendered_tag.data (), tag_size);
-
- caps = gst_static_pad_template_get_caps (&src_template);
- gst_buffer_set_caps (buf, caps);
- gst_caps_unref (caps);
+ gst_buffer_fill (buf, 0, rendered_tag.data (), tag_size);
return buf;
}
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
gst_element_class_set_details_simple (element_class, "Wavpack audio decoder",
"Codec/Decoder/Audio",
"Decodes Wavpack audio data",
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
/* add pad templates */
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &wvcsrc_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&wvcsrc_factory));
/* set element details */
gst_element_class_set_details_simple (element_class, "Wavpack audio encoder",
} else if ((enc->srcpad_last_return == GST_FLOW_NOT_LINKED) &&
(enc->wvcsrcpad_last_return == GST_FLOW_NOT_LINKED)) {
ret = GST_FLOW_NOT_LINKED;
- } else if ((enc->srcpad_last_return == GST_FLOW_WRONG_STATE) &&
- (enc->wvcsrcpad_last_return == GST_FLOW_WRONG_STATE)) {
- ret = GST_FLOW_WRONG_STATE;
+ } else if ((enc->srcpad_last_return == GST_FLOW_FLUSHING) &&
+ (enc->wvcsrcpad_last_return == GST_FLOW_FLUSHING)) {
+ ret = GST_FLOW_FLUSHING;
} else {
GST_ELEMENT_ERROR (enc, LIBRARY, ENCODE, (NULL),
("encoding samples failed"));
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <gst/gst.h>
#include <gst/gst-i18n-plugin.h>
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &wvc_src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&wvc_src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
gst_element_class_set_details_simple (element_class, "Wavpack parser",
"Codec/Demuxer/Audio",
if (offset + size > wvparse->upstream_length) {
GST_DEBUG_OBJECT (wvparse, "EOS: %" G_GINT64_FORMAT " + %u > %"
G_GINT64_FORMAT, offset, size, wvparse->upstream_length);
- flow_ret = GST_FLOW_UNEXPECTED;
+ flow_ret = GST_FLOW_EOS;
goto done;
}
}
", got only %u of %u bytes", offset, GST_BUFFER_SIZE (buf), size);
gst_buffer_unref (buf);
buf = NULL;
- flow_ret = GST_FLOW_UNEXPECTED;
+ flow_ret = GST_FLOW_EOS;
}
done:
static GstFlowReturn
gst_wavpack_parse_resync_loop (GstWavpackParse * parse, WavpackHeader * header)
{
- GstFlowReturn flow_ret = GST_FLOW_UNEXPECTED;
+ GstFlowReturn flow_ret = GST_FLOW_EOS;
GstBuffer *buf = NULL;
GST_LOG_OBJECT (parse, "pausing task, reason %s", reason);
gst_pad_pause_task (parse->sinkpad);
- if (flow_ret == GST_FLOW_UNEXPECTED && parse->srcpad) {
+ if (flow_ret == GST_FLOW_EOS && parse->srcpad) {
if (parse->segment.flags & GST_SEEK_FLAG_SEGMENT) {
GstClockTime stop;
GST_LOG_OBJECT (parse, "Sending EOS, at end of stream");
gst_pad_push_event (parse->srcpad, gst_event_new_eos ());
}
- } else if (flow_ret == GST_FLOW_NOT_LINKED
- || flow_ret < GST_FLOW_UNEXPECTED) {
+ } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) {
GST_ELEMENT_ERROR (parse, STREAM, FAILED,
(_("Internal data stream error.")), ("stream stopped, reason %s",
reason));
+++ /dev/null
-gstreamer-*.schemas
-gstreamer.schemas
+++ /dev/null
-GST_SCHEMA_FILES = gstreamer-@GST_MAJORMINOR@.schemas
-
-if USE_GCONF
-schemadir = @GCONF_SCHEMA_FILE_DIR@
-schema_DATA = $(GST_SCHEMA_FILES)
-endif
-
-gstreamer-@GST_MAJORMINOR@.schemas: gstreamer.schemas
- cp gstreamer.schemas gstreamer-@GST_MAJORMINOR@.schemas
-
-if USE_GCONF
-if GCONF_SCHEMAS_INSTALL
-install-data-local:
- @GCONF_CONFIG_SOURCE=$(GCONF_SCHEMA_CONFIG_SOURCE) $(GCONFTOOL) \
- --makefile-install-rule $(builddir)/$(schema_DATA) || \
- (echo ;\
- echo "*****************************************************"; \
- echo "Installation of schemas failed, install them manually"; \
- echo "*****************************************************";)
- @true
-else
-install-data-local:
- @echo "***************************************************************"
- @echo "Not installing schemas, disabled with --disable-schemas-install"
- @echo "***************************************************************"
- @true
-endif
-endif # USE_GCONF
-
-CLEANFILES = $(GST_SCHEMA_FILES)
-EXTRA_DIST = $(GST_SCHEMA_FILES)
+++ /dev/null
-<gconfschemafile>
- <schemalist>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSINK@</default>
- <locale name="C">
- <short>default GStreamer audiosink</short>
- <long>GStreamer can play audio using any number of output elements. Some possible choices are osssink, esdsink and alsasink. The audiosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for default GStreamer audiosink</short>
- <long>Describes the selected output element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSINK@</default>
- <locale name="C">
- <short>GStreamer audiosink for Music and Movies</short>
- <long>GStreamer can play audio using any number of output elements. Some possible choices are osssink, esdsink and alsasink. The audiosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for GStreamer audiosink for Music and Movies</short>
- <long>Describes the selected output element for Music and Movies.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSINK@</default>
- <locale name="C">
- <short>GStreamer audiosink for Audio/Video Conferencing</short>
- <long>GStreamer can play audio using any number of output elements. Some possible choices are osssink, esdsink and alsasink. The audiosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for GStreamer audiosink for Audio/Video Conferencing</short>
- <long>Describes the selected output element for Audio/Video Conferencing.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for default GStreamer audiosink</short>
- <long>Describes the selected output element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSINK@</default>
- <locale name="C">
- <short>GStreamer audiosink for Music and Movies</short>
- <long>GStreamer can play audio using any number of output elements. Some possible choices are osssink, esdsink and alsasink. The audiosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for GStreamer audiosink for Music and Movies</short>
- <long>Describes the selected output element for Music and Movies.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSINK@</default>
- <locale name="C">
- <short>GStreamer audiosink for Audio/Video Conferencing</short>
- <long>GStreamer can play audio using any number of output elements. Some possible choices are osssink, esdsink and alsasink. The audiosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for GStreamer audiosink for Audio/Video Conferencing</short>
- <long>Describes the selected output element for Audio/Video Conferencing.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/videosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/videosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_VIDEOSINK@</default>
- <locale name="C">
- <short>default GStreamer videosink</short>
- <long>GStreamer can play video using any number of output elements. Some possible choices are xvimagesink, ximagesink, sdlvideosink and aasink. The videosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSRC@</default>
- <locale name="C">
- <short>default GStreamer audio source</short>
- <long>GStreamer can record audio using any number of input elements. Some possible choices are osssrc, esdsrc and alsasrc. The audio source can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for default GStreamer audiosrc</short>
- <long>Describes the selected input element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for default GStreamer audiosrc</short>
- <long>Describes the selected input element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/videosrc</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/videosrc</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_VIDEOSRC@</default>
- <locale name="C">
- <short>default GStreamer video source</short>
- <long>GStreamer can record video from any number of input elements. Some possible choices are v4lsrc and videotestsrc. The video source can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/visualization</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/visualization</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_VISUALIZER@</default>
- <locale name="C">
- <short>default GStreamer visualization plugin</short>
- <long>GStreamer can put visualization plugins in a pipeline to transform audio stream in video frames. Default is goom but more visualization plugins will be ported soon. The visualization plugin can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- </schemalist>
-</gconfschemafile>
G_BEGIN_DECLS
-#if !GLIB_CHECK_VERSION(2,25,0)
-
-#if defined (_MSC_VER) && !defined(_WIN64)
-typedef struct _stat32 GStatBuf;
-#else
-typedef struct stat GStatBuf;
-#endif
-
-#endif
-
-#if GLIB_CHECK_VERSION(2,26,0)
-#define GLIB_HAS_GDATETIME
-#endif
-
-/* See bug #651514 */
-#if GLIB_CHECK_VERSION(2,29,5)
-#define G_ATOMIC_POINTER_COMPARE_AND_EXCHANGE(a,b,c) \
- g_atomic_pointer_compare_and_exchange ((a),(b),(c))
-#define G_ATOMIC_INT_COMPARE_AND_EXCHANGE(a,b,c) \
- g_atomic_int_compare_and_exchange ((a),(b),(c))
-#else
-#define G_ATOMIC_POINTER_COMPARE_AND_EXCHANGE(a,b,c) \
- g_atomic_pointer_compare_and_exchange ((volatile gpointer *)(a),(b),(c))
-#define G_ATOMIC_INT_COMPARE_AND_EXCHANGE(a,b,c) \
- g_atomic_int_compare_and_exchange ((volatile int *)(a),(b),(c))
-#endif
-
-/* See bug #651514 */
-#if GLIB_CHECK_VERSION(2,29,5)
-#define G_ATOMIC_INT_ADD(a,b) g_atomic_int_add ((a),(b))
-#else
-#define G_ATOMIC_INT_ADD(a,b) g_atomic_int_exchange_and_add ((a),(b))
-#endif
-
/* copies */
-#if GLIB_CHECK_VERSION (2, 31, 0)
-#define g_mutex_new gst_g_mutex_new
-static inline GMutex *
-gst_g_mutex_new (void)
-{
- GMutex *mutex = g_slice_new (GMutex);
- g_mutex_init (mutex);
- return mutex;
-}
-#define g_mutex_free gst_g_mutex_free
-static inline void
-gst_g_mutex_free (GMutex *mutex)
-{
- g_mutex_clear (mutex);
- g_slice_free (GMutex, mutex);
-}
-#define g_static_rec_mutex_init gst_g_static_rec_mutex_init
-static inline void
-gst_g_static_rec_mutex_init (GStaticRecMutex *mutex)
-{
- static const GStaticRecMutex init_mutex = G_STATIC_REC_MUTEX_INIT;
-
- *mutex = init_mutex;
-}
-#define g_cond_new gst_g_cond_new
-static inline GCond *
-gst_g_cond_new (void)
-{
- GCond *cond = g_slice_new (GCond);
- g_cond_init (cond);
- return cond;
-}
-#define g_cond_free gst_g_cond_free
-static inline void
-gst_g_cond_free (GCond *cond)
-{
- g_cond_clear (cond);
- g_slice_free (GCond, cond);
-}
-#define g_cond_timed_wait gst_g_cond_timed_wait
-static inline gboolean
-gst_g_cond_timed_wait (GCond *cond, GMutex *mutex, GTimeVal *abs_time)
-{
- gint64 end_time;
-
- if (abs_time == NULL) {
- g_cond_wait (cond, mutex);
- return TRUE;
- }
-
- end_time = abs_time->tv_sec;
- end_time *= 1000000;
- end_time += abs_time->tv_usec;
-
- /* would be nice if we had clock_rtoffset, but that didn't seem to
- * make it into the kernel yet...
- */
- /* if CLOCK_MONOTONIC is not defined then g_get_montonic_time() and
- * g_get_real_time() are returning the same clock and we'd add ~0
- */
- end_time += g_get_monotonic_time () - g_get_real_time ();
- return g_cond_wait_until (cond, mutex, end_time);
-}
-#endif /* GLIB_CHECK_VERSION (2, 31, 0) */
-
/* adaptations */
G_END_DECLS
%define majorminor @GST_MAJORMINOR@
-%define gstreamer gstreamer
+%define gstreamer gstreamer011
-%define gst_minver 0.10.0
+%define gst_minver 0.11.0
Name: %{gstreamer}-plugins-good
Version: @VERSION@
BuildRequires: gcc-c++
-@USE_ESD_TRUE@BuildRequires: esound-devel >= 0.2.8
-@USE_ESD_TRUE@Obsoletes: gstreamer-esd
-@USE_ESD_TRUE@
-@USE_ESD_TRUE@Provides: gstreamer-audiosrc
-@USE_ESD_TRUE@Provides: gstreamer-audiosink
@USE_FLAC_TRUE@BuildRequires: flac-devel >= 1.0.3
-@USE_GCONF_TRUE@BuildRequires: GConf2-devel
@USE_JPEG_TRUE@BuildRequires: libjpeg-devel
@USE_LIBCACA_TRUE@BuildRequires: libcaca-devel
@USE_LIBDV_TRUE@BuildRequires: libdv-devel
@USE_LIBPNG_TRUE@BuildRequires: libpng-devel >= 1.2.0
@USE_OSS_TRUE@BuildRequires: glibc-devel
@USE_SPEEX_TRUE@BuildRequires: speex-devel
-@USE_HAL_TRUE@BuildRequires: hal-devel
@USE_SHOUT2_TRUE@BuildRequires: libshout-devel >= 2.0
@USE_AALIB_TRUE@BuildRequires: aalib-devel >= 1.3
@USE_AALIB_TRUE@Provides: gstreamer-aasink = %{version}-%{release}
--enable-debug \
--enable-DEBUG
-make %{?_smp_mflags}
+make %{?_smp_mflags} CFLAGS+="-Wno-error" CXXFLAGS+="-Wno-error"
%install
rm -rf $RPM_BUILD_ROOT
-export GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL=1
%makeinstall
-unset GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL
# Clean out files that should not be part of the rpm.
rm -f $RPM_BUILD_ROOT%{_libdir}/gstreamer-%{majorminor}/*.la
rm -rf $RPM_BUILD_ROOT
%post
-@USE_GCONF_TRUE@export GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source`
-@USE_GCONF_TRUE@gconftool-2 --makefile-install-rule %{_sysconfdir}/gconf/schemas/gstreamer-%{majorminor}.schemas > /dev/null
%files -f gst-plugins-good-%{majorminor}.lang
%defattr(-, root, root)
%{_libdir}/gstreamer-%{majorminor}/libgsteffectv.so
%{_libdir}/gstreamer-%{majorminor}/libgstgoom.so
%{_libdir}/gstreamer-%{majorminor}/libgstlevel.so
-%{_libdir}/gstreamer-%{majorminor}/libgstefence.so
%{_libdir}/gstreamer-%{majorminor}/libgstmulaw.so
%{_libdir}/gstreamer-%{majorminor}/libgstisomp4.so
%{_libdir}/gstreamer-%{majorminor}/libgstrtp.so
%{_libdir}/gstreamer-%{majorminor}/libgstrtpmanager.so
%{_libdir}/gstreamer-%{majorminor}/libgstrtsp.so
-%{_libdir}/gstreamer-%{majorminor}/libgstsmpte.so
+# %{_libdir}/gstreamer-%{majorminor}/libgstsmpte.so
%{_libdir}/gstreamer-%{majorminor}/libgstudp.so
-%{_libdir}/gstreamer-%{majorminor}/libgstvideobox.so
+# %{_libdir}/gstreamer-%{majorminor}/libgstvideobox.so
%{_libdir}/gstreamer-%{majorminor}/libgstwavenc.so
%{_libdir}/gstreamer-%{majorminor}/libgstwavparse.so
%{_libdir}/gstreamer-%{majorminor}/libgstauparse.so
%{_libdir}/gstreamer-%{majorminor}/libgstnavigationtest.so
%{_libdir}/gstreamer-%{majorminor}/libgstalphacolor.so
@USE_CAIRO_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstcairo.so
-%{_libdir}/gstreamer-%{majorminor}/libgstflxdec.so
+# %{_libdir}/gstreamer-%{majorminor}/libgstflxdec.so
%{_libdir}/gstreamer-%{majorminor}/libgstmatroska.so
%{_libdir}/gstreamer-%{majorminor}/libgstvideomixer.so
%{_libdir}/gstreamer-%{majorminor}/libgstcutter.so
%{_libdir}/gstreamer-%{majorminor}/libgstequalizer.so
%{_libdir}/gstreamer-%{majorminor}/libgstmultifile.so
%{_libdir}/gstreamer-%{majorminor}/libgstspectrum.so
-%{_libdir}/gstreamer-%{majorminor}/libgstgoom2k1.so
-%{_libdir}/gstreamer-%{majorminor}/libgstinterleave.so
+# %{_libdir}/gstreamer-%{majorminor}/libgstgoom2k1.so
+# %{_libdir}/gstreamer-%{majorminor}/libgstinterleave.so
%{_libdir}/gstreamer-%{majorminor}/libgstreplaygain.so
-%{_libdir}/gstreamer-%{majorminor}/libgstdeinterlace.so
+# %{_libdir}/gstreamer-%{majorminor}/libgstdeinterlace.so
%{_libdir}/gstreamer-%{majorminor}/libgstflv.so
%{_libdir}/gstreamer-%{majorminor}/libgsty4menc.so
-%{_libdir}/gstreamer-%{majorminor}/libgstoss4audio.so
-%{_libdir}/gstreamer-%{majorminor}/libgstimagefreeze.so
+# %{_libdir}/gstreamer-%{majorminor}/libgstoss4audio.so
+# %{_libdir}/gstreamer-%{majorminor}/libgstimagefreeze.so
%{_libdir}/gstreamer-%{majorminor}/libgstshapewipe.so
%{_libdir}/gstreamer-%{majorminor}/libgstvideofilter.so
%{_libdir}/gstreamer-%{majorminor}/libgstaudioparsers.so
# gstreamer-plugins with external dependencies but in the main package
@USE_LIBCACA_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstcacasink.so
-@USE_ESD_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstesd.so
@USE_FLAC_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstflac.so
@USE_JACK_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstjack.so
@USE_JPEG_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstjpeg.so
@USE_LIBPNG_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstpng.so
@USE_OSS_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstossaudio.so
@USE_SPEEX_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstspeex.so
-@USE_GCONF_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstgconfelements.so
-@USE_HAL_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgsthalelements.so
@USE_SHOUT2_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstshout2.so
@USE_AALIB_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstaasink.so
@USE_LIBDV_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstdv.so
@USE_SOUP_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstsouphttpsrc.so
@USE_PULSE_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstpulse.so
-# schema files
-@USE_GCONF_TRUE@%{_sysconfdir}/gconf/schemas/gstreamer-%{majorminor}.schemas
-
%changelog
* Tue Jun 12 2007 Jan Schmidt <jan at fluendo dot com>
- wavpack and qtdemux have moved from bad
libgstalpha_la_SOURCES = gstalpha.c
libgstalpha_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) \
- $(GST_CONTROLLER_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
+ $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstalpha_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
- $(GST_CONTROLLER_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) $(LIBM)
+ $(GST_BASE_LIBS) $(GST_LIBS) $(LIBM)
libgstalpha_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstalpha_la_LIBTOOLFLAGS = --tag=disable-static
* Sample pipeline:
* |[
* gst-launch videotestsrc pattern=smpte75 ! alpha method=green ! \
- * videomixer name=mixer ! ffmpegcolorspace ! autovideosink \
+ * videomixer name=mixer ! videoconvert ! autovideosink \
* videotestsrc pattern=snow ! mixer.
* ]| This pipeline adds a alpha channel to the SMPTE color bars
* with green as the transparent color and mixes the output with
};
static GstStaticPadTemplate gst_alpha_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA
- ";" GST_VIDEO_CAPS_YUV ("Y444")
- ";" GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR
- ";" GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR
- ";" GST_VIDEO_CAPS_YUV ("Y42B") ";" GST_VIDEO_CAPS_YUV ("YUY2")
- ";" GST_VIDEO_CAPS_YUV ("YVYU") ";" GST_VIDEO_CAPS_YUV ("UYVY")
- ";" GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12")
- ";" GST_VIDEO_CAPS_YUV ("Y41B"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, BGRx, xBGR, "
+ "RGBx, RGB, BGR, Y42B, YUY2, YVYU, UYVY, I420, YV12, Y41B } "))
);
static GstStaticPadTemplate gst_alpha_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV")
- ";" GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";" GST_VIDEO_CAPS_ABGR
- ";" GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_YUV ("Y444")
- ";" GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR
- ";" GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR
- ";" GST_VIDEO_CAPS_YUV ("Y42B") ";" GST_VIDEO_CAPS_YUV ("YUY2")
- ";" GST_VIDEO_CAPS_YUV ("YVYU") ";" GST_VIDEO_CAPS_YUV ("UYVY")
- ";" GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12")
- ";" GST_VIDEO_CAPS_YUV ("Y41B")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, BGRx, xBGR, "
+ "RGBx, RGB, BGR, Y42B, YUY2, YVYU, UYVY, I420, YV12, " "Y41B } "))
);
static GstStaticCaps gst_alpha_alpha_caps =
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV")
- ";" GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";" GST_VIDEO_CAPS_ABGR ";"
- GST_VIDEO_CAPS_RGBA);
+GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }"));
/* FIXME: why do we need our own lock for this? */
-#if !GLIB_CHECK_VERSION (2, 31, 0)
-#define GST_ALPHA_LOCK(alpha) G_STMT_START { \
- GST_LOG_OBJECT (alpha, "Locking alpha from thread %p", g_thread_self ()); \
- g_static_mutex_lock (&alpha->lock); \
- GST_LOG_OBJECT (alpha, "Locked alpha from thread %p", g_thread_self ()); \
-} G_STMT_END
-
-#define GST_ALPHA_UNLOCK(alpha) G_STMT_START { \
- GST_LOG_OBJECT (alpha, "Unlocking alpha from thread %p", g_thread_self ()); \
- g_static_mutex_unlock (&alpha->lock); \
-} G_STMT_END
-#else
#define GST_ALPHA_LOCK(alpha) G_STMT_START { \
GST_LOG_OBJECT (alpha, "Locking alpha from thread %p", g_thread_self ()); \
g_mutex_lock (&alpha->lock); \
GST_LOG_OBJECT (alpha, "Unlocking alpha from thread %p", g_thread_self ()); \
g_mutex_unlock (&alpha->lock); \
} G_STMT_END
-#endif
-static gboolean gst_alpha_start (GstBaseTransform * trans);
-static gboolean gst_alpha_get_unit_size (GstBaseTransform * btrans,
- GstCaps * caps, guint * size);
static GstCaps *gst_alpha_transform_caps (GstBaseTransform * btrans,
- GstPadDirection direction, GstCaps * caps);
-static gboolean gst_alpha_set_caps (GstBaseTransform * btrans,
- GstCaps * incaps, GstCaps * outcaps);
-static GstFlowReturn gst_alpha_transform (GstBaseTransform * btrans,
- GstBuffer * in, GstBuffer * out);
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static void gst_alpha_before_transform (GstBaseTransform * btrans,
GstBuffer * buf);
+static gboolean gst_alpha_set_info (GstVideoFilter * filter,
+ GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+ GstVideoInfo * out_info);
+static GstFlowReturn gst_alpha_transform_frame (GstVideoFilter * filter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame);
+
+static void gst_alpha_init_params_full (GstAlpha * alpha,
+ const GstVideoFormatInfo * in_info, const GstVideoFormatInfo * out_info);
static void gst_alpha_init_params (GstAlpha * alpha);
static gboolean gst_alpha_set_process_function (GstAlpha * alpha);
+static gboolean gst_alpha_set_process_function_full (GstAlpha * alpha,
+ GstVideoInfo * in_info, GstVideoInfo * out_info);
static void gst_alpha_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GValue * value, GParamSpec * pspec);
static void gst_alpha_finalize (GObject * object);
-GST_BOILERPLATE (GstAlpha, gst_alpha, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
+#define gst_alpha_parent_class parent_class
+G_DEFINE_TYPE (GstAlpha, gst_alpha, GST_TYPE_VIDEO_FILTER);
#define GST_TYPE_ALPHA_METHOD (gst_alpha_method_get_type())
static GType
}
static void
-gst_alpha_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Alpha filter",
- "Filter/Effect/Video",
- "Adds an alpha channel to video - uniform or via chroma-keying",
- "Wim Taymans <wim@fluendo.com>\n"
- "Edward Hervey <edward.hervey@collabora.co.uk>\n"
- "Jan Schmidt <thaytan@noraisin.net>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_alpha_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_alpha_src_template);
-
- GST_DEBUG_CATEGORY_INIT (gst_alpha_debug, "alpha", 0,
- "alpha - Element for adding alpha channel to streams");
-}
-
-static void
gst_alpha_class_init (GstAlphaClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *btrans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (gst_alpha_debug, "alpha", 0,
+ "alpha - Element for adding alpha channel to streams");
gobject_class->set_property = gst_alpha_set_property;
gobject_class->get_property = gst_alpha_get_property;
DEFAULT_PREFER_PASSTHROUGH,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
- btrans_class->start = GST_DEBUG_FUNCPTR (gst_alpha_start);
- btrans_class->transform = GST_DEBUG_FUNCPTR (gst_alpha_transform);
+ gst_element_class_set_details_simple (gstelement_class, "Alpha filter",
+ "Filter/Effect/Video",
+ "Adds an alpha channel to video - uniform or via chroma-keying",
+ "Wim Taymans <wim.taymans@gmail.com>\n"
+ "Edward Hervey <edward.hervey@collabora.co.uk>\n"
+ "Jan Schmidt <thaytan@noraisin.net>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_alpha_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_alpha_src_template));
+
btrans_class->before_transform =
GST_DEBUG_FUNCPTR (gst_alpha_before_transform);
- btrans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_alpha_get_unit_size);
btrans_class->transform_caps = GST_DEBUG_FUNCPTR (gst_alpha_transform_caps);
- btrans_class->set_caps = GST_DEBUG_FUNCPTR (gst_alpha_set_caps);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_alpha_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_alpha_transform_frame);
}
static void
-gst_alpha_init (GstAlpha * alpha, GstAlphaClass * klass)
+gst_alpha_init (GstAlpha * alpha)
{
alpha->alpha = DEFAULT_ALPHA;
alpha->method = DEFAULT_METHOD;
alpha->black_sensitivity = DEFAULT_BLACK_SENSITIVITY;
alpha->white_sensitivity = DEFAULT_WHITE_SENSITIVITY;
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- g_static_mutex_init (&alpha->lock);
-#else
g_mutex_init (&alpha->lock);
-#endif
}
static void
{
GstAlpha *alpha = GST_ALPHA (object);
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- g_static_mutex_free (&alpha->lock);
-#else
g_mutex_clear (&alpha->lock);
-#endif
G_OBJECT_CLASS (parent_class)->finalize (object);
}
}
}
-static gboolean
-gst_alpha_get_unit_size (GstBaseTransform * btrans,
- GstCaps * caps, guint * size)
-{
- GstVideoFormat format;
- gint width, height;
-
- if (!gst_video_format_parse_caps (caps, &format, &width, &height))
- return FALSE;
-
- *size = gst_video_format_get_size (format, width, height);
-
- GST_DEBUG_OBJECT (btrans, "unit size = %d for format %d w %d height %d",
- *size, format, width, height);
-
- return TRUE;
-}
-
static GstCaps *
gst_alpha_transform_caps (GstBaseTransform * btrans,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
GstAlpha *alpha = GST_ALPHA (btrans);
GstCaps *ret, *tmp, *tmp2;
structure = gst_structure_copy (gst_caps_get_structure (caps, i));
gst_structure_remove_field (structure, "format");
- gst_structure_remove_field (structure, "endianness");
- gst_structure_remove_field (structure, "depth");
- gst_structure_remove_field (structure, "bpp");
- gst_structure_remove_field (structure, "red_mask");
- gst_structure_remove_field (structure, "green_mask");
- gst_structure_remove_field (structure, "blue_mask");
- gst_structure_remove_field (structure, "alpha_mask");
gst_structure_remove_field (structure, "color-matrix");
gst_structure_remove_field (structure, "chroma-site");
- gst_structure_set_name (structure, "video/x-raw-yuv");
- gst_caps_append_structure (tmp, gst_structure_copy (structure));
- gst_structure_set_name (structure, "video/x-raw-rgb");
gst_caps_append_structure (tmp, structure);
}
GST_DEBUG_OBJECT (alpha,
"Transformed %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, caps, ret);
+ if (filter) {
+ GstCaps *intersection;
+
+ GST_DEBUG_OBJECT (alpha, "Using filter caps %" GST_PTR_FORMAT, filter);
+ intersection =
+ gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (ret);
+ ret = intersection;
+ GST_DEBUG_OBJECT (alpha, "Intersection %" GST_PTR_FORMAT, ret);
+ }
+
+
GST_ALPHA_UNLOCK (alpha);
return ret;
}
static gboolean
-gst_alpha_set_caps (GstBaseTransform * btrans,
- GstCaps * incaps, GstCaps * outcaps)
+gst_alpha_set_info (GstVideoFilter * filter,
+ GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+ GstVideoInfo * out_info)
{
- GstAlpha *alpha = GST_ALPHA (btrans);
- const gchar *matrix;
+ GstAlpha *alpha = GST_ALPHA (filter);
gboolean passthrough;
GST_ALPHA_LOCK (alpha);
- if (!gst_video_format_parse_caps (incaps, &alpha->in_format,
- &alpha->width, &alpha->height) ||
- !gst_video_format_parse_caps (outcaps, &alpha->out_format,
- &alpha->width, &alpha->height)) {
- GST_WARNING_OBJECT (alpha,
- "Failed to parse caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps,
- outcaps);
- GST_ALPHA_UNLOCK (alpha);
- return FALSE;
- }
-
- matrix = gst_video_parse_caps_color_matrix (incaps);
- alpha->in_sdtv = matrix ? g_str_equal (matrix, "sdtv") : TRUE;
-
- matrix = gst_video_parse_caps_color_matrix (outcaps);
- alpha->out_sdtv = matrix ? g_str_equal (matrix, "sdtv") : TRUE;
+ alpha->in_sdtv = in_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ alpha->out_sdtv = out_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
passthrough = alpha->prefer_passthrough &&
- alpha->in_format == alpha->out_format && alpha->in_sdtv == alpha->out_sdtv
- && alpha->method == ALPHA_METHOD_SET && alpha->alpha == 1.0;
+ GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_INFO_FORMAT (out_info)
+ && alpha->in_sdtv == alpha->out_sdtv && alpha->method == ALPHA_METHOD_SET
+ && alpha->alpha == 1.0;
GST_DEBUG_OBJECT (alpha,
"Setting caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT
" (passthrough: %d)", incaps, outcaps, passthrough);
- gst_base_transform_set_passthrough (btrans, passthrough);
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM_CAST (filter),
+ passthrough);
- if (!gst_alpha_set_process_function (alpha) && !passthrough) {
- GST_WARNING_OBJECT (alpha,
- "No processing function for this caps and no passthrough mode");
- GST_ALPHA_UNLOCK (alpha);
- return FALSE;
- }
+ if (!gst_alpha_set_process_function_full (alpha, in_info, out_info)
+ && !passthrough)
+ goto no_process;
- gst_alpha_init_params (alpha);
+ gst_alpha_init_params_full (alpha, in_info->finfo, out_info->finfo);
GST_ALPHA_UNLOCK (alpha);
return TRUE;
+
+ /* ERRORS */
+no_process:
+ {
+ GST_WARNING_OBJECT (alpha,
+ "No processing function for this caps and no passthrough mode");
+ GST_ALPHA_UNLOCK (alpha);
+ return FALSE;
+ }
}
/* based on http://www.cs.utah.edu/~michael/chroma/
#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
static void
-gst_alpha_set_argb_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_argb_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint matrix[12];
gint y, u, v;
gint o[4];
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 3, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
memcpy (matrix,
alpha->out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
}
static void
-gst_alpha_chroma_key_argb_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_argb_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
gint matrix[12];
gint o[4];
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 3, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_argb_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_argb_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
gint i, j;
gint p[4], o[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
-
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 3, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
}
static void
-gst_alpha_chroma_key_argb_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_argb_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
gint matrix[12], matrix2[12];
gint p[4], o[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
-
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 3, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_ayuv_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_ayuv_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
gint y, x;
gint matrix[12];
gint r, g, b;
gint p[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
memcpy (matrix,
alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
}
static void
-gst_alpha_chroma_key_ayuv_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_ayuv_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
gint matrix[12];
gint p[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_ayuv_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_ayuv_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
gint y, x;
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
if (alpha->in_sdtv == alpha->out_sdtv) {
for (y = 0; y < height; y++) {
for (x = 0; x < width; x++) {
}
static void
-gst_alpha_chroma_key_ayuv_ayuv (const guint8 * src, guint8 * dest,
- gint width, gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_ayuv_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint smin, smax;
guint8 kfgy_scale = alpha->kfgy_scale;
guint noise_level2 = alpha->noise_level2;
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_rgb_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_rgb_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint i, j;
gint matrix[12];
gint o[3];
gint bpp;
- bpp = gst_video_format_get_pixel_stride (alpha->in_format, 0);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
memcpy (matrix,
alpha->out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
}
static void
-gst_alpha_chroma_key_rgb_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_rgb_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
gint o[3];
gint bpp;
- bpp = gst_video_format_get_pixel_stride (alpha->in_format, 0);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_rgb_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_rgb_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint i, j;
gint p[4], o[3];
gint bpp;
- bpp = gst_video_format_get_pixel_stride (alpha->in_format, 0);
-
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
}
static void
-gst_alpha_chroma_key_rgb_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_rgb_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
gint p[4], o[3];
gint bpp;
- bpp = gst_video_format_get_pixel_stride (alpha->in_format, 0);
-
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_planar_yuv_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_planar_yuv_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
const guint8 *srcY, *srcY_tmp;
const guint8 *srcU, *srcU_tmp;
gint y_stride, uv_stride;
gint v_subs, h_subs;
- y_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
- uv_stride = gst_video_format_get_row_stride (alpha->in_format, 1, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+ uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
srcY_tmp = srcY = src;
- srcU_tmp = srcU =
- src + gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- srcV_tmp = srcV =
- src + gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- switch (alpha->in_format) {
+ srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
v_subs = h_subs = 2;
}
static void
-gst_alpha_chroma_key_planar_yuv_ayuv (const guint8 * src, guint8 * dest,
- gint width, gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_planar_yuv_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
const guint8 *srcY, *srcY_tmp;
const guint8 *srcU, *srcU_tmp;
guint8 kfgy_scale = alpha->kfgy_scale;
guint noise_level2 = alpha->noise_level2;
- y_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
- uv_stride = gst_video_format_get_row_stride (alpha->in_format, 1, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+ uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
srcY_tmp = srcY = src;
- srcU_tmp = srcU =
- src + gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- srcV_tmp = srcV =
- src + gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- switch (alpha->in_format) {
+ srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
v_subs = h_subs = 2;
}
static void
-gst_alpha_set_planar_yuv_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_planar_yuv_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
const guint8 *srcY, *srcY_tmp;
const guint8 *srcU, *srcU_tmp;
gint r, g, b;
gint p[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
-
- y_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
- uv_stride = gst_video_format_get_row_stride (alpha->in_format, 1, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+
+ y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+ uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
srcY_tmp = srcY = src;
- srcU_tmp = srcU =
- src + gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- srcV_tmp = srcV =
- src + gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- switch (alpha->in_format) {
+ srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
v_subs = h_subs = 2;
}
static void
-gst_alpha_chroma_key_planar_yuv_argb (const guint8 * src, guint8 * dest,
- gint width, gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_planar_yuv_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
const guint8 *srcY, *srcY_tmp;
const guint8 *srcU, *srcU_tmp;
gint matrix[12];
gint p[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
-
- y_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
- uv_stride = gst_video_format_get_row_stride (alpha->in_format, 1, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+
+ y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+ uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
srcY_tmp = srcY = src;
- srcU_tmp = srcU =
- src + gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- srcV_tmp = srcV =
- src + gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- switch (alpha->in_format) {
+ srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
v_subs = h_subs = 2;
}
static void
-gst_alpha_set_packed_422_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_packed_422_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint i, j;
gint y, u, v;
gint src_stride;
const guint8 *src_tmp;
- src_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- p[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
p[2] = p[0] + 2;
- p[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
if (alpha->in_sdtv != alpha->out_sdtv) {
gint matrix[12];
}
static void
-gst_alpha_chroma_key_packed_422_ayuv (const guint8 * src, guint8 * dest,
- gint width, gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_packed_422_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint smin, smax;
gint src_stride;
const guint8 *src_tmp;
- src_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- p[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
p[2] = p[0] + 2;
- p[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_packed_422_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_packed_422_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint i, j;
gint p[4], o[4];
gint matrix[12];
gint r, g, b;
- src_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
o[2] = o[0] + 2;
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
memcpy (matrix,
alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
}
static void
-gst_alpha_chroma_key_packed_422_argb (const guint8 * src, guint8 * dest,
- gint width, gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_packed_422_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
const guint8 *src_tmp;
gint matrix[12];
- src_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
o[2] = o[0] + 2;
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
memcpy (matrix,
alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
/* Protected with the alpha lock */
static void
-gst_alpha_init_params (GstAlpha * alpha)
+gst_alpha_init_params_full (GstAlpha * alpha,
+ const GstVideoFormatInfo * in_info, const GstVideoFormatInfo * out_info)
{
gfloat kgl;
gfloat tmp;
* RGB->YUV: convert to YUV, chroma keying
* YUV->YUV: convert matrix, chroma keying
*/
- if (gst_video_format_is_rgb (alpha->in_format)
- && gst_video_format_is_rgb (alpha->out_format))
+ if (GST_VIDEO_FORMAT_INFO_IS_RGB (in_info)
+ && GST_VIDEO_FORMAT_INFO_IS_RGB (out_info))
matrix = cog_rgb_to_ycbcr_matrix_8bit_sdtv;
- else if (gst_video_format_is_yuv (alpha->in_format)
- && gst_video_format_is_rgb (alpha->out_format))
+ else if (GST_VIDEO_FORMAT_INFO_IS_YUV (in_info)
+ && GST_VIDEO_FORMAT_INFO_IS_RGB (out_info))
matrix =
(alpha->in_sdtv) ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
cog_rgb_to_ycbcr_matrix_8bit_hdtv;
- else if (gst_video_format_is_rgb (alpha->in_format)
- && gst_video_format_is_yuv (alpha->out_format))
+ else if (GST_VIDEO_FORMAT_INFO_IS_RGB (in_info)
+ && GST_VIDEO_FORMAT_INFO_IS_YUV (out_info))
matrix =
(alpha->out_sdtv) ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
cog_rgb_to_ycbcr_matrix_8bit_hdtv;
alpha->noise_level2 = alpha->noise_level * alpha->noise_level;
}
+static void
+gst_alpha_init_params (GstAlpha * alpha)
+{
+ gst_alpha_init_params_full (alpha, GST_VIDEO_FILTER (alpha)->in_info.finfo,
+ GST_VIDEO_FILTER (alpha)->out_info.finfo);
+}
+
/* Protected with the alpha lock */
static gboolean
-gst_alpha_set_process_function (GstAlpha * alpha)
+gst_alpha_set_process_function_full (GstAlpha * alpha, GstVideoInfo * in_info,
+ GstVideoInfo * out_info)
{
alpha->process = NULL;
switch (alpha->method) {
case ALPHA_METHOD_SET:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
case GST_VIDEO_FORMAT_AYUV:
- switch (alpha->in_format) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_set_ayuv_ayuv;
break;
case GST_VIDEO_FORMAT_ABGR:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGRA:
- switch (alpha->in_format) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_set_ayuv_argb;
break;
case ALPHA_METHOD_GREEN:
case ALPHA_METHOD_BLUE:
case ALPHA_METHOD_CUSTOM:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
case GST_VIDEO_FORMAT_AYUV:
- switch (alpha->in_format) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_chroma_key_ayuv_ayuv;
break;
case GST_VIDEO_FORMAT_ABGR:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGRA:
- switch (alpha->in_format) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_chroma_key_ayuv_argb;
break;
}
static gboolean
-gst_alpha_start (GstBaseTransform * btrans)
+gst_alpha_set_process_function (GstAlpha * alpha)
{
- GstAlpha *alpha = GST_ALPHA (btrans);
-
- GST_ALPHA_LOCK (alpha);
- gst_alpha_init_params (alpha);
- GST_ALPHA_UNLOCK (alpha);
-
- return TRUE;
+ return gst_alpha_set_process_function_full (alpha,
+ &GST_VIDEO_FILTER_CAST (alpha)->in_info,
+ &GST_VIDEO_FILTER_CAST (alpha)->out_info);
}
static void
GST_BUFFER_TIMESTAMP (buf));
GST_LOG ("Got stream time of %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (timestamp))
- gst_object_sync_values (G_OBJECT (alpha), timestamp);
+ gst_object_sync_values (GST_OBJECT (alpha), timestamp);
}
static GstFlowReturn
-gst_alpha_transform (GstBaseTransform * btrans, GstBuffer * in, GstBuffer * out)
+gst_alpha_transform_frame (GstVideoFilter * filter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
{
- GstAlpha *alpha = GST_ALPHA (btrans);
- gint width, height;
+ GstAlpha *alpha = GST_ALPHA (filter);
GST_ALPHA_LOCK (alpha);
- if (G_UNLIKELY (!alpha->process)) {
- GST_ERROR_OBJECT (alpha, "Not negotiated yet");
- GST_ALPHA_UNLOCK (alpha);
- return GST_FLOW_NOT_NEGOTIATED;
- }
+ if (G_UNLIKELY (!alpha->process))
+ goto not_negotiated;
- width = alpha->width;
- height = alpha->height;
-
- alpha->process (GST_BUFFER_DATA (in),
- GST_BUFFER_DATA (out), width, height, alpha);
+ alpha->process (in_frame, out_frame, alpha);
GST_ALPHA_UNLOCK (alpha);
return GST_FLOW_OK;
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ERROR_OBJECT (alpha, "Not negotiated yet");
+ GST_ALPHA_UNLOCK (alpha);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
}
static gboolean
plugin_init (GstPlugin * plugin)
{
- gst_controller_init (NULL, NULL);
-
return gst_element_register (plugin, "alpha", GST_RANK_NONE, GST_TYPE_ALPHA);
}
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
-#include <gst/controller/gstcontroller.h>
G_BEGIN_DECLS
/* <private> */
/* caps */
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- GStaticMutex lock;
-#else
GMutex lock;
-#endif
- GstVideoFormat in_format, out_format;
- gint width, height;
gboolean in_sdtv, out_sdtv;
/* properties */
gboolean prefer_passthrough;
/* processing function */
- void (*process) (const guint8 *src, guint8 *dest, gint width, gint height, GstAlpha *alpha);
+ void (*process) (const GstVideoFrame *in_frame, GstVideoFrame *out_frame, GstAlpha *alpha);
/* precalculated values for chroma keying */
gint8 cb, cr;
*
* Sample pipeline:
* |[
- * gst-launch videotestsrc ! "video/x-raw-yuv,format=(fourcc)AYUV" ! \
- * alphacolor ! "video/x-raw-rgb" ! ffmpegcolorspace ! autovideosink
+ * gst-launch videotestsrc ! "video/x-raw,format=(fourcc)AYUV" ! \
+ * alphacolor ! videoconvert ! autovideosink
* ]|
*/
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_ABGR ";"
- GST_VIDEO_CAPS_YUV ("AYUV"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBA, BGRA, ARGB, ABGR, AYUV }"))
);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_ABGR ";"
- GST_VIDEO_CAPS_YUV ("AYUV"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBA, BGRA, ARGB, ABGR, AYUV }"))
);
-GST_BOILERPLATE (GstAlphaColor, gst_alpha_color, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+G_DEFINE_TYPE (GstAlphaColor, gst_alpha_color, GST_TYPE_VIDEO_FILTER);
static GstCaps *gst_alpha_color_transform_caps (GstBaseTransform * btrans,
- GstPadDirection direction, GstCaps * caps);
-static gboolean gst_alpha_color_set_caps (GstBaseTransform * btrans,
- GstCaps * incaps, GstCaps * outcaps);
-static GstFlowReturn gst_alpha_color_transform_ip (GstBaseTransform * btrans,
- GstBuffer * inbuf);
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter);
-static void
-gst_alpha_color_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Alpha color filter",
- "Filter/Converter/Video",
- "ARGB from/to AYUV colorspace conversion preserving the alpha channel",
- "Wim Taymans <wim@fluendo.com>");
-
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
-}
+static gboolean gst_alpha_color_set_info (GstVideoFilter * filter,
+ GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+ GstVideoInfo * out_info);
+static GstFlowReturn gst_alpha_color_transform_frame_ip (GstVideoFilter *
+ filter, GstVideoFrame * frame);
static void
gst_alpha_color_class_init (GstAlphaColorClass * klass)
{
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *gstbasetransform_class =
(GstBaseTransformClass *) klass;
+ GstVideoFilterClass *gstvideofilter_class = (GstVideoFilterClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (alpha_color_debug, "alphacolor", 0,
+ "ARGB<->AYUV colorspace conversion preserving the alpha channels");
+
+ gst_element_class_set_details_simple (gstelement_class, "Alpha color filter",
+ "Filter/Converter/Video",
+ "ARGB from/to AYUV colorspace conversion preserving the alpha channel",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
gstbasetransform_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_alpha_color_transform_caps);
- gstbasetransform_class->set_caps =
- GST_DEBUG_FUNCPTR (gst_alpha_color_set_caps);
- gstbasetransform_class->transform_ip =
- GST_DEBUG_FUNCPTR (gst_alpha_color_transform_ip);
- GST_DEBUG_CATEGORY_INIT (alpha_color_debug, "alphacolor", 0,
- "ARGB<->AYUV colorspace conversion preserving the alpha channels");
+ gstvideofilter_class->set_info = GST_DEBUG_FUNCPTR (gst_alpha_color_set_info);
+ gstvideofilter_class->transform_frame_ip =
+ GST_DEBUG_FUNCPTR (gst_alpha_color_transform_frame_ip);
}
static void
-gst_alpha_color_init (GstAlphaColor * alpha, GstAlphaColorClass * g_class)
+gst_alpha_color_init (GstAlphaColor * alpha)
{
GstBaseTransform *btrans = GST_BASE_TRANSFORM (alpha);
static GstCaps *
gst_alpha_color_transform_caps (GstBaseTransform * btrans,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
const GstCaps *tmpl_caps = NULL;
GstCaps *result = NULL, *local_caps = NULL;
/* Remove any specific parameter from the structure */
gst_structure_remove_field (structure, "format");
- gst_structure_remove_field (structure, "endianness");
- gst_structure_remove_field (structure, "depth");
- gst_structure_remove_field (structure, "bpp");
- gst_structure_remove_field (structure, "red_mask");
- gst_structure_remove_field (structure, "green_mask");
- gst_structure_remove_field (structure, "blue_mask");
- gst_structure_remove_field (structure, "alpha_mask");
gst_structure_remove_field (structure, "color-matrix");
gst_structure_remove_field (structure, "chroma-site");
- gst_structure_set_name (structure, "video/x-raw-rgb");
+ gst_structure_set_name (structure, "video/x-raw");
gst_caps_append_structure (local_caps, gst_structure_copy (structure));
- gst_structure_set_name (structure, "video/x-raw-yuv");
- gst_caps_append_structure (local_caps, structure);
}
/* Get the appropriate template */
GST_LOG_OBJECT (btrans, "transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
caps, result);
+ if (filter) {
+ GstCaps *intersection;
+
+ GST_DEBUG_OBJECT (btrans, "Using filter caps %" GST_PTR_FORMAT, filter);
+ intersection =
+ gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (result);
+ result = intersection;
+ GST_DEBUG_OBJECT (btrans, "Intersection %" GST_PTR_FORMAT, result);
+ }
+
+
return result;
}
#define DEFINE_ARGB_AYUV_FUNCTIONS(name, A, R, G, B) \
static void \
-transform_##name##_ayuv (guint8 * data, gint size, const gint *matrix) \
+transform_##name##_ayuv (GstVideoFrame * frame, const gint *matrix) \
{ \
+ guint8 *data; \
+ gsize size; \
gint y, u, v; \
gint yc[4]; \
gint uc[4]; \
gint vc[4]; \
\
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);\
+ size = GST_VIDEO_FRAME_SIZE (frame);\
+ \
memcpy (yc, matrix, 4 * sizeof (gint)); \
memcpy (uc, matrix + 4, 4 * sizeof (gint)); \
memcpy (vc, matrix + 8, 4 * sizeof (gint)); \
} \
\
static void \
-transform_ayuv_##name (guint8 * data, gint size, const gint *matrix) \
+transform_ayuv_##name (GstVideoFrame * frame, const gint *matrix) \
{ \
+ guint8 *data; \
+ gsize size; \
gint r, g, b; \
gint rc[4]; \
gint gc[4]; \
gint bc[4]; \
\
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);\
+ size = GST_VIDEO_FRAME_SIZE (frame);\
+ \
memcpy (rc, matrix, 4 * sizeof (gint)); \
memcpy (gc, matrix + 4, 4 * sizeof (gint)); \
memcpy (bc, matrix + 8, 4 * sizeof (gint)); \
DEFINE_ARGB_AYUV_FUNCTIONS (abgr, 0, 3, 2, 1);
static void
-transform_ayuv_ayuv (guint8 * data, gint size, const gint * matrix)
+transform_ayuv_ayuv (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint y, u, v;
gint yc[4];
gint uc[4];
if (matrix == NULL)
return;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
memcpy (yc, matrix, 4 * sizeof (gint));
memcpy (uc, matrix + 4, 4 * sizeof (gint));
memcpy (vc, matrix + 8, 4 * sizeof (gint));
}
static void
-transform_argb_bgra (guint8 * data, gint size, const gint * matrix)
+transform_argb_bgra (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[1];
g = data[2];
#define transform_abgr_rgba transform_argb_bgra
static void
-transform_argb_abgr (guint8 * data, gint size, const gint * matrix)
+transform_argb_abgr (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[1];
g = data[2];
#define transform_abgr_argb transform_argb_abgr
static void
-transform_rgba_bgra (guint8 * data, gint size, const gint * matrix)
+transform_rgba_bgra (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[0];
g = data[1];
#define transform_bgra_rgba transform_rgba_bgra
static void
-transform_argb_rgba (guint8 * data, gint size, const gint * matrix)
+transform_argb_rgba (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[1];
g = data[2];
#define transform_abgr_bgra transform_argb_rgba
static void
-transform_bgra_argb (guint8 * data, gint size, const gint * matrix)
+transform_bgra_argb (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[2];
g = data[1];
#define transform_rgba_abgr transform_bgra_argb
static void
-transform_rgba_argb (guint8 * data, gint size, const gint * matrix)
+transform_rgba_argb (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[0];
g = data[1];
#define transform_bgra_abgr transform_rgba_argb
static gboolean
-gst_alpha_color_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_alpha_color_set_info (GstVideoFilter * filter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstAlphaColor *alpha = GST_ALPHA_COLOR (btrans);
- gboolean ret;
- gint w, h;
- gint w2, h2;
- GstVideoFormat in_format, out_format;
- const gchar *matrix;
+ GstAlphaColor *alpha = GST_ALPHA_COLOR (filter);
gboolean in_sdtv, out_sdtv;
alpha->process = NULL;
alpha->matrix = NULL;
- ret = gst_video_format_parse_caps (incaps, &in_format, &w, &h);
- ret &= gst_video_format_parse_caps (outcaps, &out_format, &w2, &h2);
-
- if (!ret || w != w2 || h != h2) {
- GST_DEBUG_OBJECT (alpha, "incomplete or invalid caps!");
- return FALSE;
- }
-
- matrix = gst_video_parse_caps_color_matrix (incaps);
- in_sdtv = matrix ? g_str_equal (matrix, "sdtv") : TRUE;
- matrix = gst_video_parse_caps_color_matrix (outcaps);
- out_sdtv = matrix ? g_str_equal (matrix, "sdtv") : TRUE;
+ if (GST_VIDEO_INFO_WIDTH (in_info) != GST_VIDEO_INFO_WIDTH (out_info) ||
+ GST_VIDEO_INFO_HEIGHT (in_info) != GST_VIDEO_INFO_HEIGHT (out_info))
+ goto invalid_caps;
- alpha->in_format = in_format;
- alpha->out_format = out_format;
- alpha->width = w;
- alpha->height = h;
+ in_sdtv = in_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ out_sdtv = out_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
- switch (alpha->in_format) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_ARGB:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
case GST_VIDEO_FORMAT_ARGB:
alpha->process = NULL;
alpha->matrix = NULL;
}
break;
case GST_VIDEO_FORMAT_BGRA:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
case GST_VIDEO_FORMAT_BGRA:
alpha->process = NULL;
alpha->matrix = NULL;
}
break;
case GST_VIDEO_FORMAT_ABGR:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
case GST_VIDEO_FORMAT_ABGR:
alpha->process = NULL;
alpha->matrix = NULL;
}
break;
case GST_VIDEO_FORMAT_RGBA:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
case GST_VIDEO_FORMAT_RGBA:
alpha->process = NULL;
alpha->matrix = NULL;
}
break;
case GST_VIDEO_FORMAT_AYUV:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
case GST_VIDEO_FORMAT_AYUV:
if (in_sdtv == out_sdtv) {
alpha->process = transform_ayuv_ayuv;
break;
}
- if (in_format == out_format && in_sdtv == out_sdtv)
- gst_base_transform_set_passthrough (btrans, TRUE);
+ if (GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_INFO_FORMAT (out_info)
+ && in_sdtv == out_sdtv)
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
else if (!alpha->process)
- return FALSE;
+ goto no_process;
return TRUE;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_DEBUG_OBJECT (alpha, "incomplete or invalid caps");
+ return FALSE;
+ }
+no_process:
+ {
+ GST_DEBUG_OBJECT (alpha, "could not find process function");
+ return FALSE;
+ }
}
static GstFlowReturn
-gst_alpha_color_transform_ip (GstBaseTransform * btrans, GstBuffer * inbuf)
+gst_alpha_color_transform_frame_ip (GstVideoFilter * filter,
+ GstVideoFrame * frame)
{
- GstAlphaColor *alpha = GST_ALPHA_COLOR (btrans);
+ GstAlphaColor *alpha = GST_ALPHA_COLOR (filter);
- if (G_UNLIKELY (GST_BUFFER_SIZE (inbuf) != 4 * alpha->width * alpha->height)) {
- GST_ERROR_OBJECT (alpha, "Invalid buffer size (was %u, expected %u)",
- GST_BUFFER_SIZE (inbuf), alpha->width * alpha->height);
- return GST_FLOW_ERROR;
- }
-
- if (gst_base_transform_is_passthrough (btrans))
+ if (gst_base_transform_is_passthrough (GST_BASE_TRANSFORM (filter)))
return GST_FLOW_OK;
- if (G_UNLIKELY (!alpha->process)) {
- GST_ERROR_OBJECT (alpha, "Not negotiated yet");
- return GST_FLOW_NOT_NEGOTIATED;
- }
+ if (G_UNLIKELY (!alpha->process))
+ goto not_negotiated;
/* Transform in place */
- alpha->process (GST_BUFFER_DATA (inbuf), GST_BUFFER_SIZE (inbuf),
- alpha->matrix);
+ alpha->process (frame, alpha->matrix);
return GST_FLOW_OK;
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ERROR_OBJECT (alpha, "Not negotiated yet");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
}
static gboolean
GstVideoFilter parent;
/*< private >*/
- /* caps */
- GstVideoFormat in_format, out_format;
- gint width, height;
+ void (*process) (GstVideoFrame * frame, const gint * matrix);
- void (*process) (guint8 * data, gint size, const gint * matrix);
const gint *matrix;
};
GstBuffer * buffer, gboolean start_tag, guint * tag_size,
GstTagList ** tags);
-GST_BOILERPLATE (GstApeDemux, gst_ape_demux, GstTagDemux, GST_TYPE_TAG_DEMUX);
+G_DEFINE_TYPE (GstApeDemux, gst_ape_demux, GST_TYPE_TAG_DEMUX);
static void
-gst_ape_demux_base_init (gpointer klass)
+gst_ape_demux_class_init (GstApeDemuxClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *element_class;
+ GstTagDemuxClass *tagdemux_class;
+
+ GST_DEBUG_CATEGORY_INIT (apedemux_debug, "apedemux", 0,
+ "GStreamer APE tag demuxer");
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
+ tagdemux_class = GST_TAG_DEMUX_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_set_details_simple (element_class, "APE tag demuxer",
"Codec/Demuxer/Metadata",
"Read and output APE tags while demuxing the contents",
"Tim-Philipp Müller <tim centricular net>");
- GST_DEBUG_CATEGORY_INIT (apedemux_debug, "apedemux", 0,
- "GStreamer APE tag demuxer");
-}
-
-static void
-gst_ape_demux_class_init (GstApeDemuxClass * klass)
-{
- GstTagDemuxClass *tagdemux_class;
-
- tagdemux_class = GST_TAG_DEMUX_CLASS (klass);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
tagdemux_class->identify_tag = GST_DEBUG_FUNCPTR (gst_ape_demux_identify_tag);
tagdemux_class->parse_tag = GST_DEBUG_FUNCPTR (gst_ape_demux_parse_tag);
}
static void
-gst_ape_demux_init (GstApeDemux * apedemux, GstApeDemuxClass * gclass)
+gst_ape_demux_init (GstApeDemux * apedemux)
{
/* nothing to do here */
}
static GstTagList *
ape_demux_parse_tags (const guint8 * data, gint size)
{
- GstTagList *taglist = gst_tag_list_new ();
+ GstTagList *taglist = gst_tag_list_new_empty ();
GST_LOG ("Reading tags from chunk of size %u bytes", size);
break;
}
default:{
- if (gst_tag_type == GST_TYPE_DATE) {
+ if (gst_tag_type == G_TYPE_DATE) {
gint v_int;
if (sscanf (val, "%d", &v_int) == 1) {
GDate *date = g_date_new_dmy (1, 1, v_int);
- g_value_init (&v, GST_TYPE_DATE);
- gst_value_set_date (&v, date);
- g_date_free (date);
+ g_value_init (&v, G_TYPE_DATE);
+ g_value_take_boxed (&v, date);
}
} else {
GST_WARNING ("Unhandled tag type '%s' for tag '%s'",
gst_ape_demux_identify_tag (GstTagDemux * demux, GstBuffer * buffer,
gboolean start_tag, guint * tag_size)
{
- if (memcmp (GST_BUFFER_DATA (buffer), "APETAGEX", 8) != 0) {
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ if (memcmp (map.data, "APETAGEX", 8) != 0) {
GST_DEBUG_OBJECT (demux, "No APETAGEX marker at %s - not an APE file",
(start_tag) ? "start" : "end");
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
- *tag_size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buffer) + 12);
+ *tag_size = GST_READ_UINT32_LE (map.data + 12);
/* size is without header, so add 32 to account for that */
*tag_size += 32;
+ gst_buffer_unmap (buffer, &map);
+
return TRUE;
}
gst_ape_demux_parse_tag (GstTagDemux * demux, GstBuffer * buffer,
gboolean start_tag, guint * tag_size, GstTagList ** tags)
{
- const guint8 *data;
- const guint8 *footer;
+ guint8 *data;
+ guint8 *footer;
gboolean have_header;
gboolean end_tag = !start_tag;
GstCaps *sink_caps;
guint version, footer_size;
+ GstMapInfo map;
+ gsize size;
- GST_LOG_OBJECT (demux, "Parsing buffer of size %u", GST_BUFFER_SIZE (buffer));
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
- data = GST_BUFFER_DATA (buffer);
- footer = GST_BUFFER_DATA (buffer) + GST_BUFFER_SIZE (buffer) - 32;
+ GST_LOG_OBJECT (demux, "Parsing buffer of size %" G_GSIZE_FORMAT, size);
+
+ footer = data + size - 32;
GST_LOG_OBJECT (demux, "Checking for footer at offset 0x%04x",
(guint) (footer - data));
GST_TAG_CONTAINER_FORMAT, sink_caps);
gst_caps_unref (sink_caps);
+ gst_buffer_unmap (buffer, &map);
+
return GST_TAG_DEMUX_RESULT_OK;
}
plugin_LTLIBRARIES = libgstaudiofx.la
+# FIXME 0.11: ignore GValueArray warnings for now until this is sorted
+ERROR_CFLAGS=
+
# sources used to compile this plug-in
libgstaudiofx_la_SOURCES = audiofx.c\
audiopanorama.c \
# flags used to compile this plugin
libgstaudiofx_la_CFLAGS = $(GST_CFLAGS) \
$(GST_BASE_CFLAGS) \
- $(GST_CONTROLLER_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)
libgstaudiofx_la_LIBADD = $(GST_LIBS) \
$(GST_BASE_LIBS) \
- $(GST_CONTROLLER_LIBS) \
$(GST_PLUGINS_BASE_LIBS) \
-lgstaudio-$(GST_MAJORMINOR) \
-lgstfft-$(GST_MAJORMINOR) \
#include <gst/base/gstbasetransform.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include "audioamplify.h"
{METHOD_NOCLIP, "No clipping", "none"},
{0, NULL, NULL}
};
-
- /* FIXME 0.11: rename to GstAudioAmplifyClippingMethod */
- gtype = g_enum_register_static ("GstAudioPanoramaClippingMethod", values);
+ gtype = g_enum_register_static ("GstAudioAmplifyClippingMethod", values);
}
return gtype;
}
#define ALLOWED_CAPS \
- "audio/x-raw-int," \
- " depth=(int)8," \
- " width=(int)8," \
- " endianness=(int)BYTE_ORDER," \
- " signed=(bool)TRUE," \
- " rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]; " \
- "audio/x-raw-int," \
- " depth=(int)16," \
- " width=(int)16," \
- " endianness=(int)BYTE_ORDER," \
- " signed=(bool)TRUE," \
- " rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]; " \
- "audio/x-raw-int," \
- " depth=(int)32," \
- " width=(int)32," \
- " endianness=(int)BYTE_ORDER," \
- " signed=(bool)TRUE," \
+ "audio/x-raw," \
+ " format=(string) {S8,"GST_AUDIO_NE(S16)","GST_AUDIO_NE(S32)"," \
+ GST_AUDIO_NE(F32)","GST_AUDIO_NE(F64)"}," \
" rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]; " \
- "audio/x-raw-float," \
- " width=(int){32,64}," \
- " endianness=(int)BYTE_ORDER," \
- " rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]"
-
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_amplify_debug, "audioamplify", 0, "audioamplify element");
+ " channels=(int)[1,MAX], " \
+ " layout=(string) {interleaved, non-interleaved}"
-GST_BOILERPLATE_FULL (GstAudioAmplify, gst_audio_amplify, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioAmplify, gst_audio_amplify, GST_TYPE_AUDIO_FILTER);
static gboolean gst_audio_amplify_set_process_function (GstAudioAmplify *
- filter, gint clipping, gint format, gint width);
+ filter, gint clipping, GstAudioFormat format);
static void gst_audio_amplify_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_audio_amplify_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_audio_amplify_setup (GstAudioFilter * filter,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
static GstFlowReturn gst_audio_amplify_transform_ip (GstBaseTransform * base,
GstBuffer * buf);
/* GObject vmethod implementations */
static void
-gst_audio_amplify_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Audio amplifier",
- "Filter/Effect/Audio",
- "Amplifies an audio stream by a given factor",
- "Sebastian Dröge <slomo@circular-chaos.org>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_amplify_class_init (GstAudioAmplifyClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_amplify_debug, "audioamplify", 0,
+ "audioamplify element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_amplify_set_property;
gobject_class->get_property = gst_audio_amplify_get_property;
GST_TYPE_AUDIO_AMPLIFY_CLIPPING_METHOD, METHOD_CLIP,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Audio amplifier",
+ "Filter/Effect/Audio",
+ "Amplifies an audio stream by a given factor",
+ "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_amplify_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_amplify_init (GstAudioAmplify * filter, GstAudioAmplifyClass * klass)
+gst_audio_amplify_init (GstAudioAmplify * filter)
{
filter->amplification = 1.0;
gst_audio_amplify_set_process_function (filter, METHOD_CLIP,
- GST_BUFTYPE_LINEAR, 16);
+ GST_AUDIO_FORMAT_S16);
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
}
static GstAudioAmplifyProcessFunc
-gst_audio_amplify_process_function (gint clipping, gint format, gint width)
+gst_audio_amplify_process_function (gint clipping, GstAudioFormat format)
{
static const struct process
{
- gint format;
- gint width;
+ GstAudioFormat format;
gint clipping;
GstAudioAmplifyProcessFunc func;
} process[] = {
{
- GST_BUFTYPE_FLOAT, 32, METHOD_CLIP,
- gst_audio_amplify_transform_gfloat_clip}, {
- GST_BUFTYPE_FLOAT, 32, METHOD_WRAP_NEGATIVE,
+ GST_AUDIO_FORMAT_F32, METHOD_CLIP, gst_audio_amplify_transform_gfloat_clip}, {
+ GST_AUDIO_FORMAT_F32, METHOD_WRAP_NEGATIVE,
gst_audio_amplify_transform_gfloat_wrap_negative}, {
- GST_BUFTYPE_FLOAT, 32, METHOD_WRAP_POSITIVE,
+ GST_AUDIO_FORMAT_F32, METHOD_WRAP_POSITIVE,
gst_audio_amplify_transform_gfloat_wrap_positive}, {
- GST_BUFTYPE_FLOAT, 32, METHOD_NOCLIP,
+ GST_AUDIO_FORMAT_F32, METHOD_NOCLIP,
gst_audio_amplify_transform_gfloat_noclip}, {
- GST_BUFTYPE_FLOAT, 64, METHOD_CLIP,
+ GST_AUDIO_FORMAT_F64, METHOD_CLIP,
gst_audio_amplify_transform_gdouble_clip}, {
- GST_BUFTYPE_FLOAT, 64, METHOD_WRAP_NEGATIVE,
+ GST_AUDIO_FORMAT_F64, METHOD_WRAP_NEGATIVE,
gst_audio_amplify_transform_gdouble_wrap_negative}, {
- GST_BUFTYPE_FLOAT, 64, METHOD_WRAP_POSITIVE,
+ GST_AUDIO_FORMAT_F64, METHOD_WRAP_POSITIVE,
gst_audio_amplify_transform_gdouble_wrap_positive}, {
- GST_BUFTYPE_FLOAT, 64, METHOD_NOCLIP,
+ GST_AUDIO_FORMAT_F64, METHOD_NOCLIP,
gst_audio_amplify_transform_gdouble_noclip}, {
- GST_BUFTYPE_LINEAR, 8, METHOD_CLIP, gst_audio_amplify_transform_gint8_clip}, {
- GST_BUFTYPE_LINEAR, 8, METHOD_WRAP_NEGATIVE,
+ GST_AUDIO_FORMAT_S8, METHOD_CLIP, gst_audio_amplify_transform_gint8_clip}, {
+ GST_AUDIO_FORMAT_S8, METHOD_WRAP_NEGATIVE,
gst_audio_amplify_transform_gint8_wrap_negative}, {
- GST_BUFTYPE_LINEAR, 8, METHOD_WRAP_POSITIVE,
+ GST_AUDIO_FORMAT_S8, METHOD_WRAP_POSITIVE,
gst_audio_amplify_transform_gint8_wrap_positive}, {
- GST_BUFTYPE_LINEAR, 8, METHOD_NOCLIP,
+ GST_AUDIO_FORMAT_S8, METHOD_NOCLIP,
gst_audio_amplify_transform_gint8_noclip}, {
- GST_BUFTYPE_LINEAR, 16, METHOD_CLIP,
- gst_audio_amplify_transform_gint16_clip}, {
- GST_BUFTYPE_LINEAR, 16, METHOD_WRAP_NEGATIVE,
+ GST_AUDIO_FORMAT_S16, METHOD_CLIP, gst_audio_amplify_transform_gint16_clip}, {
+ GST_AUDIO_FORMAT_S16, METHOD_WRAP_NEGATIVE,
gst_audio_amplify_transform_gint16_wrap_negative}, {
- GST_BUFTYPE_LINEAR, 16, METHOD_WRAP_POSITIVE,
+ GST_AUDIO_FORMAT_S16, METHOD_WRAP_POSITIVE,
gst_audio_amplify_transform_gint16_wrap_positive}, {
- GST_BUFTYPE_LINEAR, 16, METHOD_NOCLIP,
+ GST_AUDIO_FORMAT_S16, METHOD_NOCLIP,
gst_audio_amplify_transform_gint16_noclip}, {
- GST_BUFTYPE_LINEAR, 32, METHOD_CLIP,
- gst_audio_amplify_transform_gint32_clip}, {
- GST_BUFTYPE_LINEAR, 32, METHOD_WRAP_NEGATIVE,
+ GST_AUDIO_FORMAT_S32, METHOD_CLIP, gst_audio_amplify_transform_gint32_clip}, {
+ GST_AUDIO_FORMAT_S32, METHOD_WRAP_NEGATIVE,
gst_audio_amplify_transform_gint32_wrap_negative}, {
- GST_BUFTYPE_LINEAR, 32, METHOD_WRAP_POSITIVE,
+ GST_AUDIO_FORMAT_S32, METHOD_WRAP_POSITIVE,
gst_audio_amplify_transform_gint32_wrap_positive}, {
- GST_BUFTYPE_LINEAR, 32, METHOD_NOCLIP,
+ GST_AUDIO_FORMAT_S32, METHOD_NOCLIP,
gst_audio_amplify_transform_gint32_noclip}, {
- 0, 0, 0, NULL}
+ 0, 0, NULL}
};
const struct process *p;
for (p = process; p->func; p++)
- if (p->format == format && p->width == width && p->clipping == clipping)
+ if (p->format == format && p->clipping == clipping)
return p->func;
return NULL;
}
static gboolean
gst_audio_amplify_set_process_function (GstAudioAmplify * filter, gint
- clipping_method, gint format, gint width)
+ clipping_method, GstAudioFormat format)
{
GstAudioAmplifyProcessFunc process;
/* set processing function */
- process = gst_audio_amplify_process_function (clipping_method, format, width);
+ process = gst_audio_amplify_process_function (clipping_method, format);
if (!process) {
GST_DEBUG ("wrong format");
return FALSE;
filter->process = process;
filter->clipping_method = clipping_method;
filter->format = format;
- filter->width = width;
return TRUE;
}
break;
case PROP_CLIPPING_METHOD:
gst_audio_amplify_set_process_function (filter, g_value_get_enum (value),
- filter->format, filter->width);
+ filter->format);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GstAudioFilter vmethod implementations */
static gboolean
-gst_audio_amplify_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_amplify_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioAmplify *filter = GST_AUDIO_AMPLIFY (base);
return gst_audio_amplify_set_process_function (filter,
- filter->clipping_method, format->type, format->width);
+ filter->clipping_method, GST_AUDIO_INFO_FORMAT (info));
}
/* GstBaseTransform vmethod implementations */
GstAudioAmplify *filter = GST_AUDIO_AMPLIFY (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ GstMapInfo map;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
-
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);
+
+ filter->process (filter, map.data, num_samples);
+
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_OK;
}
/* < private > */
GstAudioAmplifyProcessFunc process;
gint clipping_method;
- gint format;
- gint width;
+ GstAudioFormat format;
};
struct _GstAudioAmplifyClass
#include "config.h"
#endif
+#include <string.h>
+
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include <math.h>
PROP_POLES
};
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_band_debug, "audiochebband", 0, "audiochebband element");
-
-GST_BOILERPLATE_FULL (GstAudioChebBand, gst_audio_cheb_band,
- GstAudioFXBaseIIRFilter, GST_TYPE_AUDIO_FX_BASE_IIR_FILTER, DEBUG_INIT);
+#define gst_audio_cheb_band_parent_class parent_class
+G_DEFINE_TYPE (GstAudioChebBand, gst_audio_cheb_band,
+ GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
static void gst_audio_cheb_band_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_audio_cheb_band_finalize (GObject * object);
static gboolean gst_audio_cheb_band_setup (GstAudioFilter * filter,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
enum
{
/* GObject vmethod implementations */
static void
-gst_audio_cheb_band_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (element_class,
- "Band pass & band reject filter", "Filter/Effect/Audio",
- "Chebyshev band pass and band reject filter",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
-static void
gst_audio_cheb_band_class_init (GstAudioChebBandClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_band_debug, "audiochebband", 0,
+ "audiochebband element");
+
gobject_class->set_property = gst_audio_cheb_band_set_property;
gobject_class->get_property = gst_audio_cheb_band_get_property;
gobject_class->finalize = gst_audio_cheb_band_finalize;
4, 32, 4,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Band pass & band reject filter", "Filter/Effect/Audio",
+ "Chebyshev band pass and band reject filter",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_cheb_band_setup);
}
static void
-gst_audio_cheb_band_init (GstAudioChebBand * filter,
- GstAudioChebBandClass * klass)
+gst_audio_cheb_band_init (GstAudioChebBand * filter)
{
filter->lower_frequency = filter->upper_frequency = 0.0;
filter->mode = MODE_BAND_PASS;
filter->poles = 4;
filter->ripple = 0.25;
- filter->lock = g_mutex_new ();
+ g_mutex_init (&filter->lock);
}
static void
generate_biquad_coefficients (GstAudioChebBand * filter,
- gint p, gdouble * a0, gdouble * a1, gdouble * a2, gdouble * a3,
- gdouble * a4, gdouble * b1, gdouble * b2, gdouble * b3, gdouble * b4)
+ gint p, gdouble * b0, gdouble * b1, gdouble * b2, gdouble * b3,
+ gdouble * b4, gdouble * a1, gdouble * a2, gdouble * a3, gdouble * a4)
{
gint np = filter->poles / 2;
gdouble ripple = filter->ripple;
+ gint rate = GST_AUDIO_FILTER_RATE (filter);
/* pole location in s-plane */
gdouble rp, ip;
{
gdouble a, b, d;
gdouble alpha, beta;
- gdouble w0 =
- 2.0 * G_PI * (filter->lower_frequency /
- GST_AUDIO_FILTER (filter)->format.rate);
- gdouble w1 =
- 2.0 * G_PI * (filter->upper_frequency /
- GST_AUDIO_FILTER (filter)->format.rate);
+ gdouble w0 = 2.0 * G_PI * (filter->lower_frequency / rate);
+ gdouble w1 = 2.0 * G_PI * (filter->upper_frequency / rate);
if (filter->mode == MODE_BAND_PASS) {
a = cos ((w1 + w0) / 2.0) / cos ((w1 - w0) / 2.0);
d = 1.0 + beta * (y1 - beta * y2);
- *a0 = (x0 + beta * (-x1 + beta * x2)) / d;
- *a1 = (alpha * (-2.0 * x0 + x1 + beta * x1 - 2.0 * beta * x2)) / d;
- *a2 =
+ *b0 = (x0 + beta * (-x1 + beta * x2)) / d;
+ *b1 = (alpha * (-2.0 * x0 + x1 + beta * x1 - 2.0 * beta * x2)) / d;
+ *b2 =
(-x1 - beta * beta * x1 + 2.0 * beta * (x0 + x2) +
alpha * alpha * (x0 - x1 + x2)) / d;
- *a3 = (alpha * (x1 + beta * (-2.0 * x0 + x1) - 2.0 * x2)) / d;
- *a4 = (beta * (beta * x0 - x1) + x2) / d;
- *b1 = (alpha * (2.0 + y1 + beta * y1 - 2.0 * beta * y2)) / d;
- *b2 =
+ *b3 = (alpha * (x1 + beta * (-2.0 * x0 + x1) - 2.0 * x2)) / d;
+ *b4 = (beta * (beta * x0 - x1) + x2) / d;
+ *a1 = (alpha * (2.0 + y1 + beta * y1 - 2.0 * beta * y2)) / d;
+ *a2 =
(-y1 - beta * beta * y1 - alpha * alpha * (1.0 + y1 - y2) +
2.0 * beta * (-1.0 + y2)) / d;
- *b3 = (alpha * (y1 + beta * (2.0 + y1) - 2.0 * y2)) / d;
- *b4 = (-beta * beta - beta * y1 + y2) / d;
+ *a3 = (alpha * (y1 + beta * (2.0 + y1) - 2.0 * y2)) / d;
+ *a4 = (-beta * beta - beta * y1 + y2) / d;
} else {
a = cos ((w1 + w0) / 2.0) / cos ((w1 - w0) / 2.0);
b = tan (1.0 / 2.0) * tan ((w1 - w0) / 2.0);
d = -1.0 + beta * (beta * y2 + y1);
- *a0 = (-x0 - beta * x1 - beta * beta * x2) / d;
- *a1 = (alpha * (2.0 * x0 + x1 + beta * x1 + 2.0 * beta * x2)) / d;
- *a2 =
+ *b0 = (-x0 - beta * x1 - beta * beta * x2) / d;
+ *b1 = (alpha * (2.0 * x0 + x1 + beta * x1 + 2.0 * beta * x2)) / d;
+ *b2 =
(-x1 - beta * beta * x1 - 2.0 * beta * (x0 + x2) -
alpha * alpha * (x0 + x1 + x2)) / d;
- *a3 = (alpha * (x1 + beta * (2.0 * x0 + x1) + 2.0 * x2)) / d;
- *a4 = (-beta * beta * x0 - beta * x1 - x2) / d;
- *b1 = (alpha * (-2.0 + y1 + beta * y1 + 2.0 * beta * y2)) / d;
- *b2 =
+ *b3 = (alpha * (x1 + beta * (2.0 * x0 + x1) + 2.0 * x2)) / d;
+ *b4 = (-beta * beta * x0 - beta * x1 - x2) / d;
+ *a1 = (alpha * (-2.0 + y1 + beta * y1 + 2.0 * beta * y2)) / d;
+ *a2 =
-(y1 + beta * beta * y1 + 2.0 * beta * (-1.0 + y2) +
alpha * alpha * (-1.0 + y1 + y2)) / d;
- *b3 = (alpha * (beta * (-2.0 + y1) + y1 + 2.0 * y2)) / d;
- *b4 = -(-beta * beta + beta * y1 + y2) / d;
+ *a3 = (alpha * (beta * (-2.0 + y1) + y1 + 2.0 * y2)) / d;
+ *a4 = -(-beta * beta + beta * y1 + y2) / d;
}
}
}
static void
generate_coefficients (GstAudioChebBand * filter)
{
- if (GST_AUDIO_FILTER (filter)->format.rate == 0) {
+ gint rate = GST_AUDIO_FILTER_RATE (filter);
+
+ if (rate == 0) {
gdouble *a = g_new0 (gdouble, 1);
+ gdouble *b = g_new0 (gdouble, 1);
a[0] = 1.0;
+ b[0] = 1.0;
gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
- (filter), a, 1, NULL, 0);
+ (filter), a, 1, b, 1);
GST_LOG_OBJECT (filter, "rate was not set yet");
return;
}
if (filter->upper_frequency <= filter->lower_frequency) {
gdouble *a = g_new0 (gdouble, 1);
+ gdouble *b = g_new0 (gdouble, 1);
- a[0] = (filter->mode == MODE_BAND_PASS) ? 0.0 : 1.0;
+ a[0] = 1.0;
+ b[0] = (filter->mode == MODE_BAND_PASS) ? 0.0 : 1.0;
gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
- (filter), a, 1, NULL, 0);
+ (filter), a, 1, b, 1);
GST_LOG_OBJECT (filter, "frequency band had no or negative dimension");
return;
}
- if (filter->upper_frequency > GST_AUDIO_FILTER (filter)->format.rate / 2) {
- filter->upper_frequency = GST_AUDIO_FILTER (filter)->format.rate / 2;
+ if (filter->upper_frequency > rate / 2) {
+ filter->upper_frequency = rate / 2;
GST_LOG_OBJECT (filter, "clipped upper frequency to nyquist frequency");
}
b[4] = 1.0;
for (p = 1; p <= np / 4; p++) {
- gdouble a0, a1, a2, a3, a4, b1, b2, b3, b4;
+ gdouble b0, b1, b2, b3, b4, a1, a2, a3, a4;
gdouble *ta = g_new0 (gdouble, np + 5);
gdouble *tb = g_new0 (gdouble, np + 5);
- generate_biquad_coefficients (filter, p, &a0, &a1, &a2, &a3, &a4, &b1,
- &b2, &b3, &b4);
+ generate_biquad_coefficients (filter, p, &b0, &b1, &b2, &b3, &b4, &a1,
+ &a2, &a3, &a4);
memcpy (ta, a, sizeof (gdouble) * (np + 5));
memcpy (tb, b, sizeof (gdouble) * (np + 5));
* to the cascade by multiplication of the transfer
* functions */
for (i = 4; i < np + 5; i++) {
- a[i] =
- a0 * ta[i] + a1 * ta[i - 1] + a2 * ta[i - 2] + a3 * ta[i - 3] +
- a4 * ta[i - 4];
b[i] =
- tb[i] - b1 * tb[i - 1] - b2 * tb[i - 2] - b3 * tb[i - 3] -
+ b0 * tb[i] + b1 * tb[i - 1] + b2 * tb[i - 2] + b3 * tb[i - 3] +
b4 * tb[i - 4];
+ a[i] =
+ ta[i] - a1 * ta[i - 1] - a2 * ta[i - 2] - a3 * ta[i - 3] -
+ a4 * ta[i - 4];
}
g_free (ta);
g_free (tb);
}
- /* Move coefficients to the beginning of the array
- * and multiply the b coefficients with -1 to move from
+ /* Move coefficients to the beginning of the array to move from
* the transfer function's coefficients to the difference
* equation's coefficients */
- b[4] = 0.0;
for (i = 0; i <= np; i++) {
a[i] = a[i + 4];
- b[i] = -b[i + 4];
+ b[i] = b[i + 4];
}
/* Normalize to unity gain at frequency 0 and frequency
gain1 = sqrt (gain1 * gain2);
for (i = 0; i <= np; i++) {
- a[i] /= gain1;
+ b[i] /= gain1;
}
} else {
/* gain is H(wc), wc = center frequency */
- gdouble w1 =
- 2.0 * G_PI * (filter->lower_frequency /
- GST_AUDIO_FILTER (filter)->format.rate);
- gdouble w2 =
- 2.0 * G_PI * (filter->upper_frequency /
- GST_AUDIO_FILTER (filter)->format.rate);
+ gdouble w1 = 2.0 * G_PI * (filter->lower_frequency / rate);
+ gdouble w2 = 2.0 * G_PI * (filter->upper_frequency / rate);
gdouble w0 = (w2 + w1) / 2.0;
gdouble zr = cos (w0), zi = sin (w0);
gdouble gain =
zi);
for (i = 0; i <= np; i++) {
- a[i] /= gain;
+ b[i] /= gain;
}
}
20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b,
np + 1, 1.0, 0.0)));
{
- gdouble w1 =
- 2.0 * G_PI * (filter->lower_frequency /
- GST_AUDIO_FILTER (filter)->format.rate);
- gdouble w2 =
- 2.0 * G_PI * (filter->upper_frequency /
- GST_AUDIO_FILTER (filter)->format.rate);
+ gdouble w1 = 2.0 * G_PI * (filter->lower_frequency / rate);
+ gdouble w2 = 2.0 * G_PI * (filter->upper_frequency / rate);
gdouble w0 = (w2 + w1) / 2.0;
gdouble zr, zi;
}
GST_LOG_OBJECT (filter, "%.2f dB gain @ %dHz",
20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b,
- np + 1, -1.0, 0.0)),
- GST_AUDIO_FILTER (filter)->format.rate / 2);
+ np + 1, -1.0, 0.0)), rate / 2);
}
}
{
GstAudioChebBand *filter = GST_AUDIO_CHEB_BAND (object);
- g_mutex_free (filter->lock);
- filter->lock = NULL;
+ g_mutex_clear (&filter->lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
switch (prop_id) {
case PROP_MODE:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->mode = g_value_get_enum (value);
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
case PROP_TYPE:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->type = g_value_get_int (value);
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
case PROP_LOWER_FREQUENCY:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->lower_frequency = g_value_get_float (value);
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
case PROP_UPPER_FREQUENCY:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->upper_frequency = g_value_get_float (value);
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
case PROP_RIPPLE:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->ripple = g_value_get_float (value);
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
case PROP_POLES:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->poles = GST_ROUND_UP_4 (g_value_get_int (value));
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GstAudioFilter vmethod implementations */
static gboolean
-gst_audio_cheb_band_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_cheb_band_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioChebBand *filter = GST_AUDIO_CHEB_BAND (base);
generate_coefficients (filter);
- return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, format);
+ return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
}
gfloat ripple;
/* < private > */
- GMutex *lock;
+ GMutex lock;
};
struct _GstAudioChebBandClass
#include "config.h"
#endif
+#include <string.h>
+
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include <math.h>
PROP_POLES
};
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_limit_debug, "audiocheblimit", 0, "audiocheblimit element");
-
-GST_BOILERPLATE_FULL (GstAudioChebLimit,
- gst_audio_cheb_limit, GstAudioFXBaseIIRFilter,
- GST_TYPE_AUDIO_FX_BASE_IIR_FILTER, DEBUG_INIT);
+#define gst_audio_cheb_limit_parent_class parent_class
+G_DEFINE_TYPE (GstAudioChebLimit,
+ gst_audio_cheb_limit, GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
static void gst_audio_cheb_limit_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_audio_cheb_limit_finalize (GObject * object);
static gboolean gst_audio_cheb_limit_setup (GstAudioFilter * filter,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
enum
{
/* GObject vmethod implementations */
static void
-gst_audio_cheb_limit_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (element_class,
- "Low pass & high pass filter",
- "Filter/Effect/Audio",
- "Chebyshev low pass and high pass filter",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
-static void
gst_audio_cheb_limit_class_init (GstAudioChebLimitClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_limit_debug, "audiocheblimit", 0,
+ "audiocheblimit element");
+
gobject_class->set_property = gst_audio_cheb_limit_set_property;
gobject_class->get_property = gst_audio_cheb_limit_get_property;
gobject_class->finalize = gst_audio_cheb_limit_finalize;
2, 32, 4,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Low pass & high pass filter",
+ "Filter/Effect/Audio",
+ "Chebyshev low pass and high pass filter",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_cheb_limit_setup);
}
static void
-gst_audio_cheb_limit_init (GstAudioChebLimit * filter,
- GstAudioChebLimitClass * klass)
+gst_audio_cheb_limit_init (GstAudioChebLimit * filter)
{
filter->cutoff = 0.0;
filter->mode = MODE_LOW_PASS;
filter->poles = 4;
filter->ripple = 0.25;
- filter->lock = g_mutex_new ();
+ g_mutex_init (&filter->lock);
}
static void
generate_biquad_coefficients (GstAudioChebLimit * filter,
- gint p, gdouble * a0, gdouble * a1, gdouble * a2,
- gdouble * b1, gdouble * b2)
+ gint p, gdouble * b0, gdouble * b1, gdouble * b2,
+ gdouble * a1, gdouble * a2)
{
gint np = filter->poles;
gdouble ripple = filter->ripple;
{
gdouble k, d;
gdouble omega =
- 2.0 * G_PI * (filter->cutoff / GST_AUDIO_FILTER (filter)->format.rate);
+ 2.0 * G_PI * (filter->cutoff / GST_AUDIO_FILTER_RATE (filter));
if (filter->mode == MODE_LOW_PASS)
k = sin ((1.0 - omega) / 2.0) / sin ((1.0 + omega) / 2.0);
k = -cos ((omega + 1.0) / 2.0) / cos ((omega - 1.0) / 2.0);
d = 1.0 + y1 * k - y2 * k * k;
- *a0 = (x0 + k * (-x1 + k * x2)) / d;
- *a1 = (x1 + k * k * x1 - 2.0 * k * (x0 + x2)) / d;
- *a2 = (x0 * k * k - x1 * k + x2) / d;
- *b1 = (2.0 * k + y1 + y1 * k * k - 2.0 * y2 * k) / d;
- *b2 = (-k * k - y1 * k + y2) / d;
+ *b0 = (x0 + k * (-x1 + k * x2)) / d;
+ *b1 = (x1 + k * k * x1 - 2.0 * k * (x0 + x2)) / d;
+ *b2 = (x0 * k * k - x1 * k + x2) / d;
+ *a1 = (2.0 * k + y1 + y1 * k * k - 2.0 * y2 * k) / d;
+ *a2 = (-k * k - y1 * k + y2) / d;
if (filter->mode == MODE_HIGH_PASS) {
*a1 = -*a1;
static void
generate_coefficients (GstAudioChebLimit * filter)
{
- if (GST_AUDIO_FILTER (filter)->format.rate == 0) {
+ if (GST_AUDIO_FILTER_RATE (filter) == 0) {
gdouble *a = g_new0 (gdouble, 1);
+ gdouble *b = g_new0 (gdouble, 1);
a[0] = 1.0;
+ b[0] = 1.0;
gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
- (filter), a, 1, NULL, 0);
+ (filter), a, 1, b, 1);
GST_LOG_OBJECT (filter, "rate was not set yet");
return;
}
- if (filter->cutoff >= GST_AUDIO_FILTER (filter)->format.rate / 2.0) {
+ if (filter->cutoff >= GST_AUDIO_FILTER_RATE (filter) / 2.0) {
gdouble *a = g_new0 (gdouble, 1);
+ gdouble *b = g_new0 (gdouble, 1);
- a[0] = (filter->mode == MODE_LOW_PASS) ? 1.0 : 0.0;
+ a[0] = 1.0;
+ b[0] = (filter->mode == MODE_LOW_PASS) ? 1.0 : 0.0;
gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
- (filter), a, 1, NULL, 0);
+ (filter), a, 1, b, 1);
GST_LOG_OBJECT (filter, "cutoff was higher than nyquist frequency");
return;
} else if (filter->cutoff <= 0.0) {
gdouble *a = g_new0 (gdouble, 1);
+ gdouble *b = g_new0 (gdouble, 1);
- a[0] = (filter->mode == MODE_LOW_PASS) ? 0.0 : 1.0;
+ a[0] = 1.0;
+ b[0] = (filter->mode == MODE_LOW_PASS) ? 0.0 : 1.0;
gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
- (filter), a, 1, NULL, 0);
+ (filter), a, 1, b, 1);
GST_LOG_OBJECT (filter, "cutoff is lower than zero");
return;
}
b[2] = 1.0;
for (p = 1; p <= np / 2; p++) {
- gdouble a0, a1, a2, b1, b2;
+ gdouble b0, b1, b2, a1, a2;
gdouble *ta = g_new0 (gdouble, np + 3);
gdouble *tb = g_new0 (gdouble, np + 3);
- generate_biquad_coefficients (filter, p, &a0, &a1, &a2, &b1, &b2);
+ generate_biquad_coefficients (filter, p, &b0, &b1, &b2, &a1, &a2);
memcpy (ta, a, sizeof (gdouble) * (np + 3));
memcpy (tb, b, sizeof (gdouble) * (np + 3));
* to the cascade by multiplication of the transfer
* functions */
for (i = 2; i < np + 3; i++) {
- a[i] = a0 * ta[i] + a1 * ta[i - 1] + a2 * ta[i - 2];
- b[i] = tb[i] - b1 * tb[i - 1] - b2 * tb[i - 2];
+ b[i] = b0 * tb[i] + b1 * tb[i - 1] + b2 * tb[i - 2];
+ a[i] = ta[i] - a1 * ta[i - 1] - a2 * ta[i - 2];
}
g_free (ta);
g_free (tb);
}
- /* Move coefficients to the beginning of the array
- * and multiply the b coefficients with -1 to move from
+ /* Move coefficients to the beginning of the array to move from
* the transfer function's coefficients to the difference
* equation's coefficients */
- b[2] = 0.0;
for (i = 0; i <= np; i++) {
a[i] = a[i + 2];
- b[i] = -b[i + 2];
+ b[i] = b[i + 2];
}
/* Normalize to unity gain at frequency 0 for lowpass
-1.0, 0.0);
for (i = 0; i <= np; i++) {
- a[i] /= gain;
+ b[i] /= gain;
}
}
#ifndef GST_DISABLE_GST_DEBUG
{
gdouble wc =
- 2.0 * G_PI * (filter->cutoff /
- GST_AUDIO_FILTER (filter)->format.rate);
+ 2.0 * G_PI * (filter->cutoff / GST_AUDIO_FILTER_RATE (filter));
gdouble zr = cos (wc), zi = sin (wc);
GST_LOG_OBJECT (filter, "%.2f dB gain @ %d Hz",
GST_LOG_OBJECT (filter, "%.2f dB gain @ %d Hz",
20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b,
- np + 1, -1.0, 0.0)),
- GST_AUDIO_FILTER (filter)->format.rate / 2);
+ np + 1, -1.0, 0.0)), GST_AUDIO_FILTER_RATE (filter) / 2);
}
}
{
GstAudioChebLimit *filter = GST_AUDIO_CHEB_LIMIT (object);
- g_mutex_free (filter->lock);
- filter->lock = NULL;
+ g_mutex_clear (&filter->lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
switch (prop_id) {
case PROP_MODE:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->mode = g_value_get_enum (value);
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
case PROP_TYPE:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->type = g_value_get_int (value);
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
case PROP_CUTOFF:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->cutoff = g_value_get_float (value);
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
case PROP_RIPPLE:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->ripple = g_value_get_float (value);
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
case PROP_POLES:
- g_mutex_lock (filter->lock);
+ g_mutex_lock (&filter->lock);
filter->poles = GST_ROUND_UP_2 (g_value_get_int (value));
generate_coefficients (filter);
- g_mutex_unlock (filter->lock);
+ g_mutex_unlock (&filter->lock);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GstAudioFilter vmethod implementations */
static gboolean
-gst_audio_cheb_limit_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_cheb_limit_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioChebLimit *filter = GST_AUDIO_CHEB_LIMIT (base);
generate_coefficients (filter);
- return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, format);
+ return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
}
gfloat ripple;
/* < private > */
- GMutex *lock;
+ GMutex lock;
};
struct _GstAudioChebLimitClass
#include <gst/base/gstbasetransform.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include "audiodynamic.h"
};
#define ALLOWED_CAPS \
- "audio/x-raw-int," \
- " depth=(int)16," \
- " width=(int)16," \
- " endianness=(int)BYTE_ORDER," \
- " signed=(bool)TRUE," \
- " rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]; " \
- "audio/x-raw-float," \
- " width=(int)32," \
- " endianness=(int)BYTE_ORDER," \
- " rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]"
-
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_dynamic_debug, "audiodynamic", 0, "audiodynamic element");
-
-GST_BOILERPLATE_FULL (GstAudioDynamic, gst_audio_dynamic, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+ "audio/x-raw," \
+ " format=(string) {"GST_AUDIO_NE(S16)","GST_AUDIO_NE(F32)"}," \
+ " rate=(int)[1,MAX]," \
+ " channels=(int)[1,MAX]," \
+ " layout=(string) {interleaved, non-interleaved}"
+
+G_DEFINE_TYPE (GstAudioDynamic, gst_audio_dynamic, GST_TYPE_AUDIO_FILTER);
static void gst_audio_dynamic_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GValue * value, GParamSpec * pspec);
static gboolean gst_audio_dynamic_setup (GstAudioFilter * filter,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
static GstFlowReturn gst_audio_dynamic_transform_ip (GstBaseTransform * base,
GstBuffer * buf);
}
static gboolean
-gst_audio_dynamic_set_process_function (GstAudioDynamic * filter)
+gst_audio_dynamic_set_process_function (GstAudioDynamic * filter,
+ const GstAudioInfo * info)
{
gint func_index;
+ if (GST_AUDIO_INFO_FORMAT (info) == GST_AUDIO_FORMAT_UNKNOWN)
+ return FALSE;
+
func_index = (filter->mode == MODE_COMPRESSOR) ? 0 : 4;
func_index += (filter->characteristics == CHARACTERISTICS_HARD_KNEE) ? 0 : 2;
- func_index +=
- (GST_AUDIO_FILTER (filter)->format.type == GST_BUFTYPE_FLOAT) ? 1 : 0;
+ func_index += (GST_AUDIO_INFO_FORMAT (info) == GST_AUDIO_FORMAT_F32) ? 1 : 0;
if (func_index >= 0 && func_index < 8) {
filter->process = process_functions[func_index];
/* GObject vmethod implementations */
static void
-gst_audio_dynamic_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class,
- "Dynamic range controller", "Filter/Effect/Audio",
- "Compressor and Expander", "Sebastian Dröge <slomo@circular-chaos.org>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_dynamic_class_init (GstAudioDynamicClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_dynamic_debug, "audiodynamic", 0,
+ "audiodynamic element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_dynamic_set_property;
gobject_class->get_property = gst_audio_dynamic_get_property;
1.0,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Dynamic range controller", "Filter/Effect/Audio",
+ "Compressor and Expander", "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_dynamic_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_dynamic_init (GstAudioDynamic * filter, GstAudioDynamicClass * klass)
+gst_audio_dynamic_init (GstAudioDynamic * filter)
{
filter->ratio = 1.0;
filter->threshold = 0.0;
switch (prop_id) {
case PROP_CHARACTERISTICS:
filter->characteristics = g_value_get_enum (value);
- gst_audio_dynamic_set_process_function (filter);
+ gst_audio_dynamic_set_process_function (filter,
+ GST_AUDIO_FILTER_INFO (filter));
break;
case PROP_MODE:
filter->mode = g_value_get_enum (value);
- gst_audio_dynamic_set_process_function (filter);
+ gst_audio_dynamic_set_process_function (filter,
+ GST_AUDIO_FILTER_INFO (filter));
break;
case PROP_THRESHOLD:
filter->threshold = g_value_get_float (value);
/* GstAudioFilter vmethod implementations */
static gboolean
-gst_audio_dynamic_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_dynamic_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioDynamic *filter = GST_AUDIO_DYNAMIC (base);
gboolean ret = TRUE;
- ret = gst_audio_dynamic_set_process_function (filter);
+ ret = gst_audio_dynamic_set_process_function (filter, info);
return ret;
}
gdouble zero_p, zero_n;
gdouble a_p, b_p, c_p;
gdouble a_n, b_n, c_n;
+ gdouble r2;
/* Nothing to do for us here if threshold equals 0.0
* or ratio equals 1.0 */
* b = (1 + r^2) / 2
* c = t * (1.0 - b - a*t)
* f(x) = ax^2 + bx + c */
- a_p = (1.0 - filter->ratio * filter->ratio) / (4.0 * thr_p);
- b_p = (1.0 + filter->ratio * filter->ratio) / 2.0;
+ r2 = filter->ratio * filter->ratio;
+ a_p = (1.0 - r2) / (4.0 * thr_p);
+ b_p = (1.0 + r2) / 2.0;
c_p = thr_p * (1.0 - b_p - a_p * thr_p);
- a_n = (1.0 - filter->ratio * filter->ratio) / (4.0 * thr_n);
- b_n = (1.0 + filter->ratio * filter->ratio) / 2.0;
+ a_n = (1.0 - r2) / (4.0 * thr_n);
+ b_n = (1.0 + r2) / 2.0;
c_n = thr_n * (1.0 - b_n - a_n * thr_n);
for (; num_samples; num_samples--) {
gdouble zero;
gdouble a_p, b_p, c_p;
gdouble a_n, b_n, c_n;
+ gdouble r2;
/* Nothing to do for us here if threshold equals 0.0
* or ratio equals 1.0 */
* b = (1 + r^2) / 2
* c = t * (1.0 - b - a*t)
* f(x) = ax^2 + bx + c */
- a_p = (1.0 - filter->ratio * filter->ratio) / (4.0 * threshold);
- b_p = (1.0 + filter->ratio * filter->ratio) / 2.0;
+ r2 = filter->ratio * filter->ratio;
+ a_p = (1.0 - r2) / (4.0 * threshold);
+ b_p = (1.0 + r2) / 2.0;
c_p = threshold * (1.0 - b_p - a_p * threshold);
- a_n = (1.0 - filter->ratio * filter->ratio) / (-4.0 * threshold);
- b_n = (1.0 + filter->ratio * filter->ratio) / 2.0;
+ a_n = (1.0 - r2) / (-4.0 * threshold);
+ b_n = (1.0 + r2) / 2.0;
c_n = -threshold * (1.0 - b_n + a_n * threshold);
for (; num_samples; num_samples--) {
GstAudioDynamic *filter = GST_AUDIO_DYNAMIC (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ GstMapInfo map;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
-
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);
+
+ filter->process (filter, map.data, num_samples);
+
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_OK;
}
#include <gst/base/gstbasetransform.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include "audioecho.h"
};
#define ALLOWED_CAPS \
- "audio/x-raw-float," \
- " width=(int) { 32, 64 }, " \
- " endianness=(int)BYTE_ORDER," \
- " rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]"
+ "audio/x-raw," \
+ " format=(string) {"GST_AUDIO_NE(F32)","GST_AUDIO_NE(F64)"}, " \
+ " rate=(int)[1,MAX]," \
+ " channels=(int)[1,MAX]," \
+ " layout=(string) interleaved"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_echo_debug, "audioecho", 0, "audioecho element");
-
-GST_BOILERPLATE_FULL (GstAudioEcho, gst_audio_echo, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+#define gst_audio_echo_parent_class parent_class
+G_DEFINE_TYPE (GstAudioEcho, gst_audio_echo, GST_TYPE_AUDIO_FILTER);
static void gst_audio_echo_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_audio_echo_finalize (GObject * object);
static gboolean gst_audio_echo_setup (GstAudioFilter * self,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
static gboolean gst_audio_echo_stop (GstBaseTransform * base);
static GstFlowReturn gst_audio_echo_transform_ip (GstBaseTransform * base,
GstBuffer * buf);
/* GObject vmethod implementations */
static void
-gst_audio_echo_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Audio echo",
- "Filter/Effect/Audio",
- "Adds an echo or reverb effect to an audio stream",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_echo_class_init (GstAudioEchoClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *basetransform_class = (GstBaseTransformClass *) klass;
GstAudioFilterClass *audioself_class = (GstAudioFilterClass *) klass;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_echo_debug, "audioecho", 0,
+ "audioecho element");
gobject_class->set_property = gst_audio_echo_set_property;
gobject_class->get_property = gst_audio_echo_get_property;
0.0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS
| GST_PARAM_CONTROLLABLE));
+ gst_element_class_set_details_simple (gstelement_class, "Audio echo",
+ "Filter/Effect/Audio",
+ "Adds an echo or reverb effect to an audio stream",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
audioself_class->setup = GST_DEBUG_FUNCPTR (gst_audio_echo_setup);
basetransform_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_audio_echo_transform_ip);
}
static void
-gst_audio_echo_init (GstAudioEcho * self, GstAudioEchoClass * klass)
+gst_audio_echo_init (GstAudioEcho * self)
{
self->delay = 1;
self->max_delay = 1;
/* GstAudioFilter vmethod implementations */
static gboolean
-gst_audio_echo_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_echo_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioEcho *self = GST_AUDIO_ECHO (base);
gboolean ret = TRUE;
- if (format->type == GST_BUFTYPE_FLOAT && format->width == 32)
- self->process = (GstAudioEchoProcessFunc)
- gst_audio_echo_transform_float;
- else if (format->type == GST_BUFTYPE_FLOAT && format->width == 64)
- self->process = (GstAudioEchoProcessFunc)
- gst_audio_echo_transform_double;
- else
- ret = FALSE;
+ switch (GST_AUDIO_INFO_FORMAT (info)) {
+ case GST_AUDIO_FORMAT_F32:
+ self->process = (GstAudioEchoProcessFunc)
+ gst_audio_echo_transform_float;
+ break;
+ case GST_AUDIO_FORMAT_F64:
+ self->process = (GstAudioEchoProcessFunc)
+ gst_audio_echo_transform_double;
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
g_free (self->buffer);
self->buffer = NULL;
type * data, guint num_samples) \
{ \
type *buffer = (type *) self->buffer; \
- guint channels = GST_AUDIO_FILTER (self)->format.channels; \
- guint rate = GST_AUDIO_FILTER (self)->format.rate; \
+ guint channels = GST_AUDIO_FILTER_CHANNELS (self); \
+ guint rate = GST_AUDIO_FILTER_RATE (self); \
guint i, j; \
guint echo_index = self->buffer_size_frames - self->delay_frames; \
gdouble echo_off = ((((gdouble) self->delay) * rate) / GST_SECOND) - self->delay_frames; \
GstAudioEcho *self = GST_AUDIO_ECHO (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ GstMapInfo map;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (self), stream_time);
-
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (self)->format.width / 8);
+ gst_object_sync_values (GST_OBJECT (self), stream_time);
if (self->buffer == NULL) {
- guint width, rate, channels;
+ guint bpf, rate;
- width = GST_AUDIO_FILTER (self)->format.width / 8;
- rate = GST_AUDIO_FILTER (self)->format.rate;
- channels = GST_AUDIO_FILTER (self)->format.channels;
+ bpf = GST_AUDIO_FILTER_BPF (self);
+ rate = GST_AUDIO_FILTER_RATE (self);
self->delay_frames =
MAX (gst_util_uint64_scale (self->delay, rate, GST_SECOND), 1);
self->buffer_size_frames =
MAX (gst_util_uint64_scale (self->max_delay, rate, GST_SECOND), 1);
- self->buffer_size = self->buffer_size_frames * width * channels;
+ self->buffer_size = self->buffer_size_frames * bpf;
self->buffer = g_try_malloc0 (self->buffer_size);
self->buffer_pos = 0;
}
}
- self->process (self, GST_BUFFER_DATA (buf), num_samples);
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ num_samples = map.size / GST_AUDIO_FILTER_BPS (self);
+
+ self->process (self, map.data, num_samples);
+
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_OK;
}
#include <math.h>
#include <gst/gst.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include "audiofirfilter.h"
static guint gst_audio_fir_filter_signals[LAST_SIGNAL] = { 0, };
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_fir_filter_debug, "audiofirfilter", 0, \
- "Generic audio FIR filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioFIRFilter, gst_audio_fir_filter, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_FIR_FILTER, DEBUG_INIT);
+#define gst_audio_fir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFIRFilter, gst_audio_fir_filter,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
static void gst_audio_fir_filter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_audio_fir_filter_finalize (GObject * object);
static gboolean gst_audio_fir_filter_setup (GstAudioFilter * base,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
-/* Element class */
-static void
-gst_audio_fir_filter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Audio FIR filter", "Filter/Effect/Audio",
- "Generic audio FIR filter with custom filter kernel",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
static void
gst_audio_fir_filter_class_init (GstAudioFIRFilterClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_fir_filter_debug, "audiofirfilter", 0,
+ "Generic audio FIR filter plugin");
+
gobject_class->set_property = gst_audio_fir_filter_set_property;
gobject_class->get_property = gst_audio_fir_filter_get_property;
gobject_class->finalize = gst_audio_fir_filter_finalize;
g_signal_new ("rate-changed", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstAudioFIRFilterClass, rate_changed),
NULL, NULL, gst_marshal_VOID__INT, G_TYPE_NONE, 1, G_TYPE_INT);
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "Audio FIR filter", "Filter/Effect/Audio",
+ "Generic audio FIR filter with custom filter kernel",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
static void
}
static void
-gst_audio_fir_filter_init (GstAudioFIRFilter * self,
- GstAudioFIRFilterClass * g_class)
+gst_audio_fir_filter_init (GstAudioFIRFilter * self)
{
GValue v = { 0, };
GValueArray *va;
g_value_unset (&v);
gst_audio_fir_filter_update_kernel (self, va);
- self->lock = g_mutex_new ();
+ g_mutex_init (&self->lock);
}
/* GstAudioFilter vmethod implementations */
/* get notified of caps and plug in the correct process function */
static gboolean
-gst_audio_fir_filter_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_fir_filter_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioFIRFilter *self = GST_AUDIO_FIR_FILTER (base);
+ gint new_rate = GST_AUDIO_INFO_RATE (info);
- if (self->rate != format->rate) {
+ if (GST_AUDIO_FILTER_RATE (self) != new_rate) {
g_signal_emit (G_OBJECT (self),
- gst_audio_fir_filter_signals[SIGNAL_RATE_CHANGED], 0, format->rate);
- self->rate = format->rate;
+ gst_audio_fir_filter_signals[SIGNAL_RATE_CHANGED], 0, new_rate);
}
- return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, format);
+ return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
}
static void
{
GstAudioFIRFilter *self = GST_AUDIO_FIR_FILTER (object);
- g_mutex_free (self->lock);
- self->lock = NULL;
+ g_mutex_clear (&self->lock);
if (self->kernel)
g_value_array_free (self->kernel);
switch (prop_id) {
case PROP_KERNEL:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
/* update kernel already pushes residues */
gst_audio_fir_filter_update_kernel (self, g_value_dup_boxed (value));
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
case PROP_LATENCY:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
self->latency = g_value_get_uint64 (value);
gst_audio_fir_filter_update_kernel (self, NULL);
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
guint64 latency;
/* < private > */
- GMutex *lock;
- gint rate;
+ GMutex lock;
};
struct _GstAudioFIRFilterClass {
#endif
#include <gst/gst.h>
-#include <gst/controller/gstcontroller.h>
#include "audiopanorama.h"
#include "audioinvert.h"
static gboolean
plugin_init (GstPlugin * plugin)
{
- /* initialize gst controller library */
- gst_controller_init (NULL, NULL);
-
return (gst_element_register (plugin, "audiopanorama", GST_RANK_NONE,
GST_TYPE_AUDIO_PANORAMA) &&
gst_element_register (plugin, "audioinvert", GST_RANK_NONE,
#include <math.h>
#include <gst/gst.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
-
-/* FIXME: Remove this once we depend on gst-plugins-base 0.10.26 */
-#ifndef GST_AUDIO_FILTER_CAST
-#define GST_AUDIO_FILTER_CAST(obj) ((GstAudioFilter *) (obj))
-#endif
#include "audiofxbasefirfilter.h"
GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
#define ALLOWED_CAPS \
- "audio/x-raw-float, " \
- " width = (int) { 32, 64 }, " \
- " endianness = (int) BYTE_ORDER, " \
- " rate = (int) [ 1, MAX ], " \
- " channels = (int) [ 1, MAX ]"
-
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_fir_filter_debug, "audiofxbasefirfilter", 0, \
- "FIR filter base class");
+ "audio/x-raw, " \
+ " format=(string){"GST_AUDIO_NE(F32)","GST_AUDIO_NE(F64)"}, " \
+ " rate = (int) [ 1, MAX ], " \
+ " channels = (int) [ 1, MAX ], " \
+ " layout=(string) interleaved"
/* Switch from time-domain to FFT convolution for kernels >= this */
#define FFT_THRESHOLD 32
#define DEFAULT_LOW_LATENCY FALSE
#define DEFAULT_DRAIN_ON_CHANGES TRUE
-GST_BOILERPLATE_FULL (GstAudioFXBaseFIRFilter, gst_audio_fx_base_fir_filter,
- GstAudioFilter, GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+#define gst_audio_fx_base_fir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFXBaseFIRFilter, gst_audio_fx_base_fir_filter,
+ GST_TYPE_AUDIO_FILTER);
static GstFlowReturn gst_audio_fx_base_fir_filter_transform (GstBaseTransform *
base, GstBuffer * inbuf, GstBuffer * outbuf);
static gboolean gst_audio_fx_base_fir_filter_start (GstBaseTransform * base);
static gboolean gst_audio_fx_base_fir_filter_stop (GstBaseTransform * base);
-static gboolean gst_audio_fx_base_fir_filter_event (GstBaseTransform * base,
- GstEvent * event);
+static gboolean gst_audio_fx_base_fir_filter_sink_event (GstBaseTransform *
+ base, GstEvent * event);
static gboolean gst_audio_fx_base_fir_filter_transform_size (GstBaseTransform *
- base, GstPadDirection direction, GstCaps * caps, guint size,
- GstCaps * othercaps, guint * othersize);
+ base, GstPadDirection direction, GstCaps * caps, gsize size,
+ GstCaps * othercaps, gsize * othersize);
static gboolean gst_audio_fx_base_fir_filter_setup (GstAudioFilter * base,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
static gboolean gst_audio_fx_base_fir_filter_query (GstPad * pad,
- GstQuery * query);
-static const GstQueryType *gst_audio_fx_base_fir_filter_query_type (GstPad *
- pad);
+ GstObject * parent, GstQuery * query);
/*
* The code below calculates the linear convolution:
static guint \
process_##width (GstAudioFXBaseFIRFilter * self, const g##ctype * src, g##ctype * dst, guint input_samples) \
{ \
- gint channels = GST_AUDIO_FILTER_CAST (self)->format.channels; \
+ gint channels = GST_AUDIO_FILTER_CHANNELS (self); \
TIME_DOMAIN_CONVOLUTION_BODY (channels); \
}
process_fft_##width (GstAudioFXBaseFIRFilter * self, const g##ctype * src, \
g##ctype * dst, guint input_samples) \
{ \
- gint channels = GST_AUDIO_FILTER_CAST (self)->format.channels; \
+ gint channels = GST_AUDIO_FILTER_CHANNELS (self); \
FFT_CONVOLUTION_BODY (channels); \
}
/* Must be called with base transform lock! */
static void
gst_audio_fx_base_fir_filter_select_process_function (GstAudioFXBaseFIRFilter *
- self, gint width, gint channels)
+ self, GstAudioFormat format, gint channels)
{
- if (width == 32 && self->fft && !self->low_latency) {
- if (channels == 1)
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_1_32;
- else if (channels == 2)
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_2_32;
- else
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_32;
- } else if (width == 64 && self->fft && !self->low_latency) {
- if (channels == 1)
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_1_64;
- else if (channels == 2)
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_2_64;
- else
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_64;
- } else if (width == 32) {
- if (channels == 1)
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_1_32;
- else if (channels == 2)
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_2_32;
- else
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_32;
- } else if (width == 64) {
- if (channels == 1)
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_1_64;
- else if (channels == 2)
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_2_64;
- else
- self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_64;
- } else {
- self->process = NULL;
+ switch (format) {
+ case GST_AUDIO_FORMAT_F32:
+ if (self->fft && !self->low_latency) {
+ if (channels == 1)
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_1_32;
+ else if (channels == 2)
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_2_32;
+ else
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_32;
+ } else {
+ if (channels == 1)
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_1_32;
+ else if (channels == 2)
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_2_32;
+ else
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_32;
+ }
+ break;
+ case GST_AUDIO_FORMAT_F64:
+ if (self->fft && !self->low_latency) {
+ if (channels == 1)
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_1_64;
+ else if (channels == 2)
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_2_64;
+ else
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_64;
+ } else {
+ if (channels == 1)
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_1_64;
+ else if (channels == 2)
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_2_64;
+ else
+ self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_64;
+ }
+ break;
+ default:
+ self->process = NULL;
+ break;
}
}
self->low_latency = low_latency;
gst_audio_fx_base_fir_filter_calculate_frequency_response (self);
gst_audio_fx_base_fir_filter_select_process_function (self,
- GST_AUDIO_FILTER_CAST (self)->format.width,
- GST_AUDIO_FILTER_CAST (self)->format.channels);
+ GST_AUDIO_FILTER_FORMAT (self), GST_AUDIO_FILTER_CHANNELS (self));
}
GST_BASE_TRANSFORM_UNLOCK (self);
break;
}
static void
-gst_audio_fx_base_fir_filter_base_init (gpointer g_class)
-{
- GstCaps *caps;
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (g_class),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_fx_base_fir_filter_class_init (GstAudioFXBaseFIRFilterClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_fir_filter_debug,
+ "audiofxbasefirfilter", 0, "FIR filter base class");
gobject_class->dispose = gst_audio_fx_base_fir_filter_dispose;
gobject_class->set_property = gst_audio_fx_base_fir_filter_set_property;
DEFAULT_DRAIN_ON_CHANGES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
trans_class->transform =
GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_transform);
trans_class->start = GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_start);
trans_class->stop = GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_stop);
- trans_class->event = GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_event);
+ trans_class->sink_event =
+ GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_sink_event);
trans_class->transform_size =
GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_transform_size);
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_setup);
}
static void
-gst_audio_fx_base_fir_filter_init (GstAudioFXBaseFIRFilter * self,
- GstAudioFXBaseFIRFilterClass * g_class)
+gst_audio_fx_base_fir_filter_init (GstAudioFXBaseFIRFilter * self)
{
self->kernel = NULL;
self->buffer = NULL;
gst_pad_set_query_function (GST_BASE_TRANSFORM (self)->srcpad,
gst_audio_fx_base_fir_filter_query);
- gst_pad_set_query_type_function (GST_BASE_TRANSFORM (self)->srcpad,
- gst_audio_fx_base_fir_filter_query_type);
}
void
{
GstBuffer *outbuf;
GstFlowReturn res;
- gint rate = GST_AUDIO_FILTER_CAST (self)->format.rate;
- gint channels = GST_AUDIO_FILTER_CAST (self)->format.channels;
- gint width = GST_AUDIO_FILTER_CAST (self)->format.width / 8;
+ gint rate = GST_AUDIO_FILTER_RATE (self);
+ gint channels = GST_AUDIO_FILTER_CHANNELS (self);
+ gint bps = GST_AUDIO_FILTER_BPS (self);
gint outsize, outsamples;
+ GstMapInfo map;
guint8 *in, *out;
if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
self->buffer = NULL;
return;
}
- outsize = outsamples * channels * width;
+ outsize = outsamples * channels * bps;
if (!self->fft || self->low_latency) {
gint64 diffsize, diffsamples;
diffsamples =
((gint64) self->latency) - ((gint64) self->buffer_fill) / channels;
if (diffsamples > 0) {
- diffsize = diffsamples * channels * width;
+ diffsize = diffsamples * channels * bps;
in = g_new0 (guint8, diffsize);
out = g_new0 (guint8, diffsize);
self->nsamples_out += self->process (self, in, out, diffsamples);
g_free (out);
}
- res = gst_pad_alloc_buffer (GST_BASE_TRANSFORM_CAST (self)->srcpad,
- GST_BUFFER_OFFSET_NONE, outsize,
- GST_PAD_CAPS (GST_BASE_TRANSFORM_CAST (self)->srcpad), &outbuf);
-
- if (G_UNLIKELY (res != GST_FLOW_OK)) {
- GST_WARNING_OBJECT (self, "failed allocating buffer of %d bytes",
- outsize);
- self->buffer_fill = 0;
- return;
- }
+ outbuf = gst_buffer_new_and_alloc (outsize);
/* Convolve the residue with zeros to get the actual remaining data */
in = g_new0 (guint8, outsize);
- self->nsamples_out +=
- self->process (self, in, GST_BUFFER_DATA (outbuf), outsamples);
+ gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
+ self->nsamples_out += self->process (self, in, map.data, outsamples);
+ gst_buffer_unmap (outbuf, &map);
+
g_free (in);
} else {
guint gensamples = 0;
- guint8 *data;
outbuf = gst_buffer_new_and_alloc (outsize);
- data = GST_BUFFER_DATA (outbuf);
+ gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
while (gensamples < outsamples) {
guint step_insamples = self->block_length - self->buffer_fill;
- guint8 *zeroes = g_new0 (guint8, step_insamples * channels * width);
- guint8 *out = g_new (guint8, self->block_length * channels * width);
+ guint8 *zeroes = g_new0 (guint8, step_insamples * channels * bps);
+ guint8 *out = g_new (guint8, self->block_length * channels * bps);
guint step_gensamples;
step_gensamples = self->process (self, zeroes, out, step_insamples);
g_free (zeroes);
- memcpy (data + gensamples * width, out, MIN (step_gensamples,
- outsamples - gensamples) * width);
+ memcpy (map.data + gensamples * bps, out, MIN (step_gensamples,
+ outsamples - gensamples) * bps);
gensamples += MIN (step_gensamples, outsamples - gensamples);
g_free (out);
}
self->nsamples_out += gensamples;
+
+ gst_buffer_unmap (outbuf, &map);
}
/* Set timestamp, offset, etc from the values we
GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET (outbuf) + outsamples;
}
- GST_DEBUG_OBJECT (self, "Pushing residue buffer of size %d with timestamp: %"
+ GST_DEBUG_OBJECT (self,
+ "Pushing residue buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
- GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
GST_BUFFER_OFFSET_END (outbuf), outsamples);
/* get notified of caps and plug in the correct process function */
static gboolean
gst_audio_fx_base_fir_filter_setup (GstAudioFilter * base,
- GstRingBufferSpec * format)
+ const GstAudioInfo * info)
{
GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
self->nsamples_in = 0;
}
- gst_audio_fx_base_fir_filter_select_process_function (self, format->width,
- format->channels);
+ gst_audio_fx_base_fir_filter_select_process_function (self,
+ GST_AUDIO_INFO_FORMAT (info), GST_AUDIO_INFO_CHANNELS (info));
return (self->process != NULL);
}
static gboolean
gst_audio_fx_base_fir_filter_transform_size (GstBaseTransform * base,
- GstPadDirection direction, GstCaps * caps, guint size, GstCaps * othercaps,
- guint * othersize)
+ GstPadDirection direction, GstCaps * caps, gsize size, GstCaps * othercaps,
+ gsize * othersize)
{
GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
guint blocklen;
- GstStructure *s;
- gint width, channels;
+ GstAudioInfo info;
+ gint bpf;
if (!self->fft || self->low_latency || direction == GST_PAD_SRC) {
*othersize = size;
return TRUE;
}
- s = gst_caps_get_structure (caps, 0);
- if (!gst_structure_get_int (s, "width", &width) ||
- !gst_structure_get_int (s, "channels", &channels))
+ if (!gst_audio_info_from_caps (&info, caps))
return FALSE;
- width /= 8;
-
- size /= width * channels;
+ bpf = GST_AUDIO_INFO_BPF (&info);
+ size /= bpf;
blocklen = self->block_length - self->kernel_length + 1;
*othersize = ((size + blocklen - 1) / blocklen) * blocklen;
-
- *othersize *= width * channels;
+ *othersize *= bpf;
return TRUE;
}
{
GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
GstClockTime timestamp, expected_timestamp;
- gint channels = GST_AUDIO_FILTER_CAST (self)->format.channels;
- gint rate = GST_AUDIO_FILTER_CAST (self)->format.rate;
- gint width = GST_AUDIO_FILTER_CAST (self)->format.width / 8;
- guint input_samples = (GST_BUFFER_SIZE (inbuf) / width) / channels;
- guint output_samples = (GST_BUFFER_SIZE (outbuf) / width) / channels;
+ gint channels = GST_AUDIO_FILTER_CHANNELS (self);
+ gint rate = GST_AUDIO_FILTER_RATE (self);
+ gint bps = GST_AUDIO_FILTER_BPS (self);
+ GstMapInfo inmap, outmap;
+ guint input_samples;
+ guint output_samples;
guint generated_samples;
guint64 output_offset;
gint64 diff = 0;
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (self), stream_time);
+ gst_object_sync_values (GST_OBJECT (self), stream_time);
g_return_val_if_fail (self->kernel != NULL, GST_FLOW_ERROR);
g_return_val_if_fail (channels != 0, GST_FLOW_ERROR);
self->start_off = GST_BUFFER_OFFSET (inbuf);
}
+ gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+
+ input_samples = (inmap.size / bps) / channels;
+ output_samples = (outmap.size / bps) / channels;
+
self->nsamples_in += input_samples;
generated_samples =
- self->process (self, GST_BUFFER_DATA (inbuf), GST_BUFFER_DATA (outbuf),
- input_samples);
+ self->process (self, inmap.data, outmap.data, input_samples);
+
+ gst_buffer_unmap (inbuf, &inmap);
+ gst_buffer_unmap (outbuf, &outmap);
g_assert (generated_samples <= output_samples);
self->nsamples_out += generated_samples;
gint64 tmp = diff;
diff = generated_samples - diff;
generated_samples = tmp;
- GST_BUFFER_DATA (outbuf) += diff * width * channels;
}
- GST_BUFFER_SIZE (outbuf) = generated_samples * width * channels;
+ gst_buffer_resize (outbuf, diff * bps * channels,
+ generated_samples * bps * channels);
output_offset = self->nsamples_out - self->latency - generated_samples;
GST_BUFFER_TIMESTAMP (outbuf) =
GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_NONE;
}
- GST_DEBUG_OBJECT (self, "Pushing buffer of size %d with timestamp: %"
+ GST_DEBUG_OBJECT (self,
+ "Pushing buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
- GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
GST_BUFFER_OFFSET_END (outbuf), generated_samples);
}
static gboolean
-gst_audio_fx_base_fir_filter_query (GstPad * pad, GstQuery * query)
+gst_audio_fx_base_fir_filter_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
- GstAudioFXBaseFIRFilter *self =
- GST_AUDIO_FX_BASE_FIR_FILTER (gst_pad_get_parent (pad));
+ GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (parent);
gboolean res = TRUE;
switch (GST_QUERY_TYPE (query)) {
GstClockTime min, max;
gboolean live;
guint64 latency;
- GstPad *peer;
- gint rate = GST_AUDIO_FILTER (self)->format.rate;
+ gint rate = GST_AUDIO_FILTER_RATE (self);
if (rate == 0) {
res = FALSE;
- } else if ((peer = gst_pad_get_peer (GST_BASE_TRANSFORM (self)->sinkpad))) {
- if ((res = gst_pad_query (peer, query))) {
- gst_query_parse_latency (query, &live, &min, &max);
+ } else if ((res =
+ gst_pad_peer_query (GST_BASE_TRANSFORM (self)->sinkpad, query))) {
+ gst_query_parse_latency (query, &live, &min, &max);
- GST_DEBUG_OBJECT (self, "Peer latency: min %"
- GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
- GST_TIME_ARGS (min), GST_TIME_ARGS (max));
+ GST_DEBUG_OBJECT (self, "Peer latency: min %"
+ GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min), GST_TIME_ARGS (max));
- if (self->fft && !self->low_latency)
- latency = self->block_length - self->kernel_length + 1;
- else
- latency = self->latency;
+ if (self->fft && !self->low_latency)
+ latency = self->block_length - self->kernel_length + 1;
+ else
+ latency = self->latency;
- /* add our own latency */
- latency = gst_util_uint64_scale_round (latency, GST_SECOND, rate);
+ /* add our own latency */
+ latency = gst_util_uint64_scale_round (latency, GST_SECOND, rate);
- GST_DEBUG_OBJECT (self, "Our latency: %"
- GST_TIME_FORMAT, GST_TIME_ARGS (latency));
+ GST_DEBUG_OBJECT (self, "Our latency: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (latency));
- min += latency;
- if (max != GST_CLOCK_TIME_NONE)
- max += latency;
+ min += latency;
+ if (max != GST_CLOCK_TIME_NONE)
+ max += latency;
- GST_DEBUG_OBJECT (self, "Calculated total latency : min %"
- GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
- GST_TIME_ARGS (min), GST_TIME_ARGS (max));
+ GST_DEBUG_OBJECT (self, "Calculated total latency : min %"
+ GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min), GST_TIME_ARGS (max));
- gst_query_set_latency (query, live, min, max);
- }
- gst_object_unref (peer);
+ gst_query_set_latency (query, live, min, max);
}
break;
}
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
- gst_object_unref (self);
return res;
}
-static const GstQueryType *
-gst_audio_fx_base_fir_filter_query_type (GstPad * pad)
-{
- static const GstQueryType types[] = {
- GST_QUERY_LATENCY,
- 0
- };
-
- return types;
-}
-
static gboolean
-gst_audio_fx_base_fir_filter_event (GstBaseTransform * base, GstEvent * event)
+gst_audio_fx_base_fir_filter_sink_event (GstBaseTransform * base,
+ GstEvent * event)
{
GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
break;
}
- return GST_BASE_TRANSFORM_CLASS (parent_class)->event (base, event);
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (base, event);
}
void
gst_audio_fx_base_fir_filter_calculate_frequency_response (self);
gst_audio_fx_base_fir_filter_select_process_function (self,
- GST_AUDIO_FILTER_CAST (self)->format.width,
- GST_AUDIO_FILTER_CAST (self)->format.channels);
+ GST_AUDIO_FILTER_FORMAT (self), GST_AUDIO_FILTER_CHANNELS (self));
if (latency_changed) {
self->latency = latency;
#include "config.h"
#endif
+#include <string.h>
+
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include <math.h>
GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
#define ALLOWED_CAPS \
- "audio/x-raw-float," \
- " width = (int) { 32, 64 }, " \
- " endianness = (int) BYTE_ORDER," \
- " rate = (int) [ 1, MAX ]," \
- " channels = (int) [ 1, MAX ]"
-
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_iir_filter_debug, "audiofxbaseiirfilter", 0, "Audio IIR Filter Base Class");
+ "audio/x-raw," \
+ " format=(string){"GST_AUDIO_NE(F32)","GST_AUDIO_NE(F64)"}," \
+ " rate = (int) [ 1, MAX ]," \
+ " channels = (int) [ 1, MAX ]," \
+ " layout=(string) interleaved"
-GST_BOILERPLATE_FULL (GstAudioFXBaseIIRFilter,
- gst_audio_fx_base_iir_filter, GstAudioFilter, GST_TYPE_AUDIO_FILTER,
- DEBUG_INIT);
+#define gst_audio_fx_base_iir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFXBaseIIRFilter,
+ gst_audio_fx_base_iir_filter, GST_TYPE_AUDIO_FILTER);
static gboolean gst_audio_fx_base_iir_filter_setup (GstAudioFilter * filter,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
static GstFlowReturn
gst_audio_fx_base_iir_filter_transform_ip (GstBaseTransform * base,
GstBuffer * buf);
/* GObject vmethod implementations */
static void
-gst_audio_fx_base_iir_filter_base_init (gpointer klass)
-{
- GstCaps *caps;
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_fx_base_iir_filter_dispose (GObject * object)
{
GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (object);
GObjectClass *gobject_class = (GObjectClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_iir_filter_debug,
+ "audiofxbaseiirfilter", 0, "Audio IIR Filter Base Class");
gobject_class->dispose = gst_audio_fx_base_iir_filter_dispose;
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_fx_base_iir_filter_setup);
trans_class->transform_ip =
}
static void
-gst_audio_fx_base_iir_filter_init (GstAudioFXBaseIIRFilter * filter,
- GstAudioFXBaseIIRFilterClass * klass)
+gst_audio_fx_base_iir_filter_init (GstAudioFXBaseIIRFilter * filter)
{
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
}
/* Evaluate the transfer function that corresponds to the IIR
- * coefficients at zr + zi*I and return the magnitude */
+ * coefficients at (zr + zi*I)^-1 and return the magnitude */
gdouble
gst_audio_fx_base_iir_filter_calculate_gain (gdouble * a, guint na, gdouble * b,
guint nb, gdouble zr, gdouble zi)
gint i;
- sum_ar = 0.0;
+ sum_ar = a[na - 1];
sum_ai = 0.0;
- for (i = na - 1; i >= 0; i--) {
+ for (i = na - 2; i >= 0; i--) {
sum_r_old = sum_ar;
sum_i_old = sum_ai;
sum_ai = (sum_r_old * zi + sum_i_old * zr) + 0.0;
}
- sum_br = 0.0;
+ sum_br = b[nb - 1];
sum_bi = 0.0;
- for (i = nb - 1; i >= 0; i--) {
+ for (i = nb - 2; i >= 0; i--) {
sum_r_old = sum_br;
sum_i_old = sum_bi;
- sum_br = (sum_r_old * zr - sum_i_old * zi) - b[i];
- sum_bi = (sum_r_old * zi + sum_i_old * zr) - 0.0;
+ sum_br = (sum_r_old * zr - sum_i_old * zi) + b[i];
+ sum_bi = (sum_r_old * zi + sum_i_old * zr) + 0.0;
}
- sum_br += 1.0;
- sum_bi += 0.0;
gain_r =
- (sum_ar * sum_br + sum_ai * sum_bi) / (sum_br * sum_br + sum_bi * sum_bi);
+ (sum_br * sum_ar + sum_bi * sum_ai) / (sum_ar * sum_ar + sum_ai * sum_ai);
gain_i =
- (sum_ai * sum_br - sum_ar * sum_bi) / (sum_br * sum_br + sum_bi * sum_bi);
+ (sum_bi * sum_ar - sum_br * sum_ai) / (sum_ar * sum_ar + sum_ai * sum_ai);
return (sqrt (gain_r * gain_r + gain_i * gain_i));
}
if (free)
g_free (ctx->x);
else
- memset (ctx->x, 0, filter->na * sizeof (gdouble));
+ memset (ctx->x, 0, filter->nb * sizeof (gdouble));
if (free)
g_free (ctx->y);
else
- memset (ctx->y, 0, filter->nb * sizeof (gdouble));
+ memset (ctx->y, 0, filter->na * sizeof (gdouble));
}
g_free (filter->channels);
for (i = 0; i < filter->nchannels; i++) {
ctx = &filter->channels[i];
- ctx->x = g_new0 (gdouble, filter->na);
- ctx->y = g_new0 (gdouble, filter->nb);
+ ctx->x = g_new0 (gdouble, filter->nb);
+ ctx->y = g_new0 (gdouble, filter->na);
}
}
static gboolean
gst_audio_fx_base_iir_filter_setup (GstAudioFilter * base,
- GstRingBufferSpec * format)
+ const GstAudioInfo * info)
{
GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
gboolean ret = TRUE;
+ gint channels;
+
+ switch (GST_AUDIO_INFO_FORMAT (info)) {
+ case GST_AUDIO_FORMAT_F32:
+ filter->process = (GstAudioFXBaseIIRFilterProcessFunc)
+ process_32;
+ break;
+ case GST_AUDIO_FORMAT_F64:
+ filter->process = (GstAudioFXBaseIIRFilterProcessFunc)
+ process_64;
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
- if (format->width == 32)
- filter->process = (GstAudioFXBaseIIRFilterProcessFunc)
- process_32;
- else if (format->width == 64)
- filter->process = (GstAudioFXBaseIIRFilterProcessFunc)
- process_64;
- else
- ret = FALSE;
+ channels = GST_AUDIO_INFO_CHANNELS (info);
- if (format->channels != filter->nchannels) {
+ if (channels != filter->nchannels) {
guint i;
GstAudioFXBaseIIRFilterChannelCtx *ctx;
if (filter->channels) {
-
for (i = 0; i < filter->nchannels; i++) {
ctx = &filter->channels[i];
g_free (ctx->x);
g_free (ctx->y);
}
-
g_free (filter->channels);
- filter->channels = NULL;
}
- filter->nchannels = format->channels;
-
- filter->channels =
- g_new0 (GstAudioFXBaseIIRFilterChannelCtx, filter->nchannels);
- for (i = 0; i < filter->nchannels; i++) {
+ filter->channels = g_new0 (GstAudioFXBaseIIRFilterChannelCtx, channels);
+ for (i = 0; i < channels; i++) {
ctx = &filter->channels[i];
- ctx->x = g_new0 (gdouble, filter->na);
- ctx->y = g_new0 (gdouble, filter->nb);
+ ctx->x = g_new0 (gdouble, filter->nb);
+ ctx->y = g_new0 (gdouble, filter->na);
}
+ filter->nchannels = channels;
}
return ret;
process (GstAudioFXBaseIIRFilter * filter,
GstAudioFXBaseIIRFilterChannelCtx * ctx, gdouble x0)
{
- gdouble val = filter->a[0] * x0;
+ gdouble val = filter->b[0] * x0;
gint i, j;
- for (i = 1, j = ctx->x_pos; i < filter->na; i++) {
- val += filter->a[i] * ctx->x[j];
+ for (i = 1, j = ctx->x_pos; i < filter->nb; i++) {
+ val += filter->b[i] * ctx->x[j];
j--;
if (j < 0)
- j = filter->na - 1;
+ j = filter->nb - 1;
}
- for (i = 1, j = ctx->y_pos; i < filter->nb; i++) {
- val += filter->b[i] * ctx->y[j];
+ for (i = 1, j = ctx->y_pos; i < filter->na; i++) {
+ val -= filter->a[i] * ctx->y[j];
j--;
if (j < 0)
- j = filter->nb - 1;
+ j = filter->na - 1;
}
+ val /= filter->a[0];
if (ctx->x) {
ctx->x_pos++;
- if (ctx->x_pos >= filter->na)
+ if (ctx->x_pos >= filter->nb)
ctx->x_pos = 0;
ctx->x[ctx->x_pos] = x0;
}
if (ctx->y) {
ctx->y_pos++;
- if (ctx->y_pos >= filter->nb)
+ if (ctx->y_pos >= filter->na)
ctx->y_pos = 0;
ctx->y[ctx->y_pos] = val;
process_##width (GstAudioFXBaseIIRFilter * filter, \
g##ctype * data, guint num_samples) \
{ \
- gint i, j, channels = GST_AUDIO_FILTER (filter)->format.channels; \
+ gint i, j, channels = filter->nchannels; \
gdouble val; \
\
for (i = 0; i < num_samples / channels; i++) { \
GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ GstMapInfo map;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
-
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
if (gst_base_transform_is_passthrough (base))
return GST_FLOW_OK;
g_return_val_if_fail (filter->a != NULL, GST_FLOW_ERROR);
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);
+
+ filter->process (filter, map.data, num_samples);
+
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_OK;
}
gst_audio_fx_base_iir_filter_stop (GstBaseTransform * base)
{
GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
- guint channels = GST_AUDIO_FILTER (filter)->format.channels;
+ guint channels = filter->nchannels;
GstAudioFXBaseIIRFilterChannelCtx *ctx;
guint i;
g_free (filter->channels);
}
filter->channels = NULL;
+ filter->nchannels = 0;
return TRUE;
}
#include <math.h>
#include <gst/gst.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include "audioiirfilter.h"
static guint gst_audio_iir_filter_signals[LAST_SIGNAL] = { 0, };
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_iir_filter_debug, "audioiirfilter", 0, \
- "Generic audio IIR filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioIIRFilter, gst_audio_iir_filter, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_IIR_FILTER, DEBUG_INIT);
+#define gst_audio_iir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioIIRFilter, gst_audio_iir_filter,
+ GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
static void gst_audio_iir_filter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_audio_iir_filter_finalize (GObject * object);
static gboolean gst_audio_iir_filter_setup (GstAudioFilter * base,
- GstRingBufferSpec * format);
-
-/* Element class */
-static void
-gst_audio_iir_filter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Audio IIR filter", "Filter/Effect/Audio",
- "Generic audio IIR filter with custom filter kernel",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
+ const GstAudioInfo * info);
static void
gst_audio_iir_filter_class_init (GstAudioIIRFilterClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_iir_filter_debug, "audioiirfilter", 0,
+ "Generic audio IIR filter plugin");
+
gobject_class->set_property = gst_audio_iir_filter_set_property;
gobject_class->get_property = gst_audio_iir_filter_get_property;
gobject_class->finalize = gst_audio_iir_filter_finalize;
g_object_class_install_property (gobject_class, PROP_A,
g_param_spec_value_array ("a", "A",
- "Filter coefficients (numerator of transfer function)",
+ "Filter coefficients (denominator of transfer function)",
g_param_spec_double ("Coefficient", "Filter Coefficient",
"Filter coefficient", -G_MAXDOUBLE, G_MAXDOUBLE, 0.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_B,
g_param_spec_value_array ("b", "B",
- "Filter coefficients (denominator of transfer function)",
+ "Filter coefficients (numerator of transfer function)",
g_param_spec_double ("Coefficient", "Filter Coefficient",
"Filter coefficient", -G_MAXDOUBLE, G_MAXDOUBLE, 0.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
g_signal_new ("rate-changed", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstAudioIIRFilterClass, rate_changed),
NULL, NULL, gst_marshal_VOID__INT, G_TYPE_NONE, 1, G_TYPE_INT);
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "Audio IIR filter", "Filter/Effect/Audio",
+ "Generic audio IIR filter with custom filter kernel",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
static void
}
static void
-gst_audio_iir_filter_init (GstAudioIIRFilter * self,
- GstAudioIIRFilterClass * g_class)
+gst_audio_iir_filter_init (GstAudioIIRFilter * self)
{
GValue v = { 0, };
GValueArray *a, *b;
b = NULL;
gst_audio_iir_filter_update_coefficients (self, a, b);
- self->lock = g_mutex_new ();
+ g_mutex_init (&self->lock);
}
/* GstAudioFilter vmethod implementations */
/* get notified of caps and plug in the correct process function */
static gboolean
-gst_audio_iir_filter_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_iir_filter_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioIIRFilter *self = GST_AUDIO_IIR_FILTER (base);
+ gint new_rate = GST_AUDIO_INFO_RATE (info);
- if (self->rate != format->rate) {
+ if (GST_AUDIO_FILTER_RATE (self) != new_rate) {
g_signal_emit (G_OBJECT (self),
- gst_audio_iir_filter_signals[SIGNAL_RATE_CHANGED], 0, format->rate);
- self->rate = format->rate;
+ gst_audio_iir_filter_signals[SIGNAL_RATE_CHANGED], 0, new_rate);
}
- return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, format);
+ return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
}
static void
{
GstAudioIIRFilter *self = GST_AUDIO_IIR_FILTER (object);
- g_mutex_free (self->lock);
- self->lock = NULL;
+ g_mutex_clear (&self->lock);
if (self->a)
g_value_array_free (self->a);
switch (prop_id) {
case PROP_A:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
gst_audio_iir_filter_update_coefficients (self, g_value_dup_boxed (value),
NULL);
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
case PROP_B:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
gst_audio_iir_filter_update_coefficients (self, NULL,
g_value_dup_boxed (value));
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
GValueArray *a, *b;
/* < private > */
- GMutex *lock;
- gint rate;
+ GMutex lock;
};
struct _GstAudioIIRFilterClass {
#include <gst/base/gstbasetransform.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include "audioinvert.h"
};
#define ALLOWED_CAPS \
- "audio/x-raw-int," \
- " depth=(int)16," \
- " width=(int)16," \
- " endianness=(int)BYTE_ORDER," \
- " signed=(bool)TRUE," \
- " rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]; " \
- "audio/x-raw-float," \
- " width=(int)32," \
- " endianness=(int)BYTE_ORDER," \
- " rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]"
-
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_invert_debug, "audioinvert", 0, "audioinvert element");
-
-GST_BOILERPLATE_FULL (GstAudioInvert, gst_audio_invert, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+ "audio/x-raw," \
+ " format=(string) {"GST_AUDIO_NE(S16)","GST_AUDIO_NE(F32)"}," \
+ " rate=(int)[1,MAX]," \
+ " channels=(int)[1,MAX]," \
+ " layout=(string) {interleaved, non-interleaved}"
+
+G_DEFINE_TYPE (GstAudioInvert, gst_audio_invert, GST_TYPE_AUDIO_FILTER);
static void gst_audio_invert_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GValue * value, GParamSpec * pspec);
static gboolean gst_audio_invert_setup (GstAudioFilter * filter,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
static GstFlowReturn gst_audio_invert_transform_ip (GstBaseTransform * base,
GstBuffer * buf);
/* GObject vmethod implementations */
static void
-gst_audio_invert_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Audio inversion",
- "Filter/Effect/Audio",
- "Swaps upper and lower half of audio samples",
- "Sebastian Dröge <slomo@circular-chaos.org>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_invert_class_init (GstAudioInvertClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_invert_debug, "audioinvert", 0,
+ "audioinvert element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_invert_set_property;
gobject_class->get_property = gst_audio_invert_get_property;
0.0,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Audio inversion",
+ "Filter/Effect/Audio",
+ "Swaps upper and lower half of audio samples",
+ "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_invert_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_invert_init (GstAudioInvert * filter, GstAudioInvertClass * klass)
+gst_audio_invert_init (GstAudioInvert * filter)
{
filter->degree = 0.0;
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
/* GstAudioFilter vmethod implementations */
static gboolean
-gst_audio_invert_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_invert_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioInvert *filter = GST_AUDIO_INVERT (base);
gboolean ret = TRUE;
- if (format->type == GST_BUFTYPE_FLOAT && format->width == 32)
- filter->process = (GstAudioInvertProcessFunc)
- gst_audio_invert_transform_float;
- else if (format->type == GST_BUFTYPE_LINEAR && format->width == 16)
- filter->process = (GstAudioInvertProcessFunc)
- gst_audio_invert_transform_int;
- else
- ret = FALSE;
-
+ switch (GST_AUDIO_INFO_FORMAT (info)) {
+ case GST_AUDIO_FORMAT_S16:
+ filter->process = (GstAudioInvertProcessFunc)
+ gst_audio_invert_transform_int;
+ break;
+ case GST_AUDIO_FORMAT_F32:
+ filter->process = (GstAudioInvertProcessFunc)
+ gst_audio_invert_transform_float;
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
return ret;
}
GstAudioInvert *filter = GST_AUDIO_INVERT (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ GstMapInfo map;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
-
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);
+
+ filter->process (filter, map.data, num_samples);
+
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_OK;
}
#include <gst/base/gstbasetransform.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include "audiokaraoke.h"
};
#define ALLOWED_CAPS \
- "audio/x-raw-int," \
- " depth=(int)16," \
- " width=(int)16," \
- " endianness=(int)BYTE_ORDER," \
- " signed=(bool)TRUE," \
- " rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]; " \
- "audio/x-raw-float," \
- " width=(int)32," \
- " endianness=(int)BYTE_ORDER," \
- " rate=(int)[1,MAX]," \
- " channels=(int)[1,MAX]"
-
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_karaoke_debug, "audiokaraoke", 0, "audiokaraoke element");
-
-GST_BOILERPLATE_FULL (GstAudioKaraoke, gst_audio_karaoke, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+ "audio/x-raw," \
+ " format=(string){"GST_AUDIO_NE(S16)","GST_AUDIO_NE(F32)"}," \
+ " rate=(int)[1,MAX]," \
+ " channels=(int)2," \
+ " channel-mask=(bitmask)0x3," \
+ " layout=(string) interleaved"
+
+G_DEFINE_TYPE (GstAudioKaraoke, gst_audio_karaoke, GST_TYPE_AUDIO_FILTER);
static void gst_audio_karaoke_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GValue * value, GParamSpec * pspec);
static gboolean gst_audio_karaoke_setup (GstAudioFilter * filter,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
static GstFlowReturn gst_audio_karaoke_transform_ip (GstBaseTransform * base,
GstBuffer * buf);
/* GObject vmethod implementations */
static void
-gst_audio_karaoke_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "AudioKaraoke",
- "Filter/Effect/Audio",
- "Removes voice from sound", "Wim Taymans <wim.taymans@gmail.com>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_karaoke_class_init (GstAudioKaraokeClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_karaoke_debug, "audiokaraoke", 0,
+ "audiokaraoke element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_karaoke_set_property;
gobject_class->get_property = gst_audio_karaoke_get_property;
"The Frequency width of the filter", 0.0, 100.0, DEFAULT_FILTER_WIDTH,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "AudioKaraoke",
+ "Filter/Effect/Audio",
+ "Removes voice from sound", "Wim Taymans <wim.taymans@gmail.com>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_karaoke_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_karaoke_init (GstAudioKaraoke * filter, GstAudioKaraokeClass * klass)
+gst_audio_karaoke_init (GstAudioKaraoke * filter)
{
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
}
static void
-update_filter (GstAudioKaraoke * filter, gint rate)
+update_filter (GstAudioKaraoke * filter)
{
gfloat A, B, C;
+ gint rate;
+ rate = GST_AUDIO_FILTER_RATE (filter);
if (rate == 0)
return;
break;
case PROP_FILTER_BAND:
filter->filter_band = g_value_get_float (value);
- update_filter (filter, filter->rate);
+ update_filter (filter);
break;
case PROP_FILTER_WIDTH:
filter->filter_width = g_value_get_float (value);
- update_filter (filter, filter->rate);
+ update_filter (filter);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GstAudioFilter vmethod implementations */
static gboolean
-gst_audio_karaoke_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_karaoke_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioKaraoke *filter = GST_AUDIO_KARAOKE (base);
gboolean ret = TRUE;
- filter->channels = format->channels;
- filter->rate = format->rate;
-
- if (format->type == GST_BUFTYPE_FLOAT && format->width == 32)
- filter->process = (GstAudioKaraokeProcessFunc)
- gst_audio_karaoke_transform_float;
- else if (format->type == GST_BUFTYPE_LINEAR && format->width == 16)
- filter->process = (GstAudioKaraokeProcessFunc)
- gst_audio_karaoke_transform_int;
- else
- ret = FALSE;
-
- update_filter (filter, format->rate);
+ switch (GST_AUDIO_INFO_FORMAT (info)) {
+ case GST_AUDIO_FORMAT_S16:
+ filter->process = (GstAudioKaraokeProcessFunc)
+ gst_audio_karaoke_transform_int;
+ break;
+ case GST_AUDIO_FORMAT_F32:
+ filter->process = (GstAudioKaraokeProcessFunc)
+ gst_audio_karaoke_transform_float;
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
+ update_filter (filter);
return ret;
}
gdouble y;
gint level;
- channels = filter->channels;
+ channels = GST_AUDIO_FILTER_CHANNELS (filter);
level = filter->level * 256;
for (i = 0; i < num_samples; i += channels) {
gdouble l, r, o;
gdouble y;
- channels = filter->channels;
+ channels = GST_AUDIO_FILTER_CHANNELS (filter);
for (i = 0; i < num_samples; i += channels) {
/* get left and right inputs */
GstAudioKaraoke *filter = GST_AUDIO_KARAOKE (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ GstMapInfo map;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
-
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);
+
+ filter->process (filter, map.data, num_samples);
+
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_OK;
}
{
GstAudioFilter audiofilter;
- gint channels;
- gint rate;
-
/* properties */
gfloat level;
gfloat mono_level;
#include "config.h"
#endif
+#include <string.h>
+
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
-#include <gst/controller/gstcontroller.h>
#include "audiopanorama.h"
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 2 ], "
- "endianness = (int) BYTE_ORDER, " "width = (int) 32; "
- "audio/x-raw-int, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 2 ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (boolean) true")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { " GST_AUDIO_NE (S32) ", " GST_AUDIO_NE (S16) "}, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) 1, "
+ "layout = (string) interleaved;"
+ "audio/x-raw, "
+ "format = (string) { " GST_AUDIO_NE (S32) ", " GST_AUDIO_NE (S16) "}, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) 2, "
+ "layout = (string) interleaved, " "channel-mask = (bitmask) 0x3")
);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) 2, "
- "endianness = (int) BYTE_ORDER, " "width = (int) 32; "
- "audio/x-raw-int, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) 2, "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (boolean) true")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { " GST_AUDIO_NE (S32) ", " GST_AUDIO_NE (S16) "}, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) 2, "
+ "layout = (string) interleaved, " "channel-mask = (bitmask)0x3")
);
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_panorama_debug, "audiopanorama", 0, "audiopanorama element");
-
-GST_BOILERPLATE_FULL (GstAudioPanorama, gst_audio_panorama, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioPanorama, gst_audio_panorama, GST_TYPE_BASE_TRANSFORM);
static void gst_audio_panorama_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GValue * value, GParamSpec * pspec);
static gboolean gst_audio_panorama_get_unit_size (GstBaseTransform * base,
- GstCaps * caps, guint * size);
+ GstCaps * caps, gsize * size);
static GstCaps *gst_audio_panorama_transform_caps (GstBaseTransform * base,
- GstPadDirection direction, GstCaps * caps);
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static gboolean gst_audio_panorama_set_caps (GstBaseTransform * base,
GstCaps * incaps, GstCaps * outcaps);
/* GObject vmethod implementations */
static void
-gst_audio_panorama_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class, &src_template);
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_set_details_simple (element_class, "Stereo positioning",
- "Filter/Effect/Audio",
- "Positions audio streams in the stereo panorama",
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_audio_panorama_class_init (GstAudioPanoramaClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_panorama_debug, "audiopanorama", 0,
+ "audiopanorama element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_panorama_set_property;
gobject_class->get_property = gst_audio_panorama_get_property;
GST_TYPE_AUDIO_PANORAMA_METHOD, METHOD_PSYCHOACOUSTIC,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Stereo positioning",
+ "Filter/Effect/Audio",
+ "Positions audio streams in the stereo panorama",
+ "Stefan Kost <ensonic@users.sf.net>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
GST_BASE_TRANSFORM_CLASS (klass)->get_unit_size =
GST_DEBUG_FUNCPTR (gst_audio_panorama_get_unit_size);
GST_BASE_TRANSFORM_CLASS (klass)->transform_caps =
}
static void
-gst_audio_panorama_init (GstAudioPanorama * filter,
- GstAudioPanoramaClass * klass)
+gst_audio_panorama_init (GstAudioPanorama * filter)
{
filter->panorama = 0;
filter->method = METHOD_PSYCHOACOUSTIC;
- filter->width = 0;
- filter->channels = 0;
- filter->format_float = FALSE;
+ gst_audio_info_init (&filter->info);
filter->process = NULL;
gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
}
static gboolean
-gst_audio_panorama_set_process_function (GstAudioPanorama * filter)
+gst_audio_panorama_set_process_function (GstAudioPanorama * filter,
+ GstAudioInfo * info)
{
gint channel_index, format_index, method_index;
+ const GstAudioFormatInfo *finfo = info->finfo;
/* set processing function */
- channel_index = filter->channels - 1;
+ channel_index = GST_AUDIO_INFO_CHANNELS (info) - 1;
if (channel_index > 1 || channel_index < 0) {
filter->process = NULL;
return FALSE;
}
- format_index = (filter->format_float) ? 1 : 0;
+ format_index = GST_AUDIO_FORMAT_INFO_IS_FLOAT (finfo) ? 1 : 0;
method_index = filter->method;
if (method_index >= NUM_METHODS || method_index < 0)
break;
case PROP_METHOD:
filter->method = g_value_get_enum (value);
- gst_audio_panorama_set_process_function (filter);
+ gst_audio_panorama_set_process_function (filter, &filter->info);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
static gboolean
gst_audio_panorama_get_unit_size (GstBaseTransform * base, GstCaps * caps,
- guint * size)
+ gsize * size)
{
- gint width, channels;
- GstStructure *structure;
- gboolean ret;
+ GstAudioInfo info;
g_assert (size);
- /* this works for both float and int */
- structure = gst_caps_get_structure (caps, 0);
- ret = gst_structure_get_int (structure, "width", &width);
- ret &= gst_structure_get_int (structure, "channels", &channels);
+ if (!gst_audio_info_from_caps (&info, caps))
+ return FALSE;
- *size = width * channels / 8;
+ *size = GST_AUDIO_INFO_BPF (&info);
- return ret;
+ return TRUE;
}
static GstCaps *
gst_audio_panorama_transform_caps (GstBaseTransform * base,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
GstCaps *res;
GstStructure *structure;
GstCaps * outcaps)
{
GstAudioPanorama *filter = GST_AUDIO_PANORAMA (base);
- const GstStructure *structure;
- gboolean ret;
- gint width;
- const gchar *fmt;
+ GstAudioInfo info;
/*GST_INFO ("incaps are %" GST_PTR_FORMAT, incaps); */
+ if (!gst_audio_info_from_caps (&info, incaps))
+ goto no_format;
- structure = gst_caps_get_structure (incaps, 0);
- ret = gst_structure_get_int (structure, "channels", &filter->channels);
- if (!ret)
- goto no_channels;
-
- ret = gst_structure_get_int (structure, "width", &width);
- if (!ret)
- goto no_width;
- filter->width = width / 8;
-
- fmt = gst_structure_get_name (structure);
- if (!strcmp (fmt, "audio/x-raw-int"))
- filter->format_float = FALSE;
- else
- filter->format_float = TRUE;
-
- GST_DEBUG ("try to process %s input with %d channels", fmt, filter->channels);
+ GST_DEBUG ("try to process %d input with %d channels",
+ GST_AUDIO_INFO_FORMAT (&info), GST_AUDIO_INFO_CHANNELS (&info));
- ret = gst_audio_panorama_set_process_function (filter);
+ if (!gst_audio_panorama_set_process_function (filter, &info))
+ goto no_format;
- if (!ret)
- GST_WARNING ("can't process input with %d channels", filter->channels);
+ filter->info = info;
- return ret;
+ return TRUE;
-no_channels:
- GST_DEBUG ("no channels in caps");
- return ret;
-no_width:
- GST_DEBUG ("no width in caps");
- return ret;
+no_format:
+ {
+ GST_DEBUG ("invalid caps");
+ return FALSE;
+ }
}
/* psychoacoustic processing functions */
GstBuffer * outbuf)
{
GstAudioPanorama *filter = GST_AUDIO_PANORAMA (base);
- guint num_samples = GST_BUFFER_SIZE (outbuf) / (2 * filter->width);
GstClockTime timestamp, stream_time;
+ GstMapInfo inmap, outmap;
timestamp = GST_BUFFER_TIMESTAMP (inbuf);
stream_time =
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+ gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
- memset (GST_BUFFER_DATA (outbuf), 0, GST_BUFFER_SIZE (outbuf));
- return GST_FLOW_OK;
+ memset (outmap.data, 0, outmap.size);
+ } else {
+ guint num_samples = outmap.size / GST_AUDIO_INFO_BPF (&filter->info);
+
+ filter->process (filter, inmap.data, outmap.data, num_samples);
}
- filter->process (filter, GST_BUFFER_DATA (inbuf),
- GST_BUFFER_DATA (outbuf), num_samples);
+ gst_buffer_unmap (inbuf, &inmap);
+ gst_buffer_unmap (outbuf, &outmap);
return GST_FLOW_OK;
}
#define __GST_AUDIO_PANORAMA_H__
#include <gst/gst.h>
+#include <gst/audio/audio.h>
#include <gst/base/gstbasetransform.h>
G_BEGIN_DECLS
GstBaseTransform element;
gfloat panorama;
-
+
/* < private > */
GstAudioPanoramaProcessFunc process;
- gint channels;
- gboolean format_float;
- gint width;
+
+ GstAudioInfo info;
gint method;
};
#include <math.h>
#include <gst/gst.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include "audiowsincband.h"
return gtype;
}
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_gst_audio_wsincband_debug, "audiowsincband", 0, \
- "Band-pass and Band-reject Windowed sinc filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioWSincBand, gst_audio_wsincband, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_FIR_FILTER, DEBUG_INIT);
+#define gst_audio_wsincband_parent_class parent_class
+G_DEFINE_TYPE (GstAudioWSincBand, gst_audio_wsincband,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
static void gst_audio_wsincband_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_audio_wsincband_finalize (GObject * object);
static gboolean gst_audio_wsincband_setup (GstAudioFilter * base,
- GstRingBufferSpec * format);
-
+ const GstAudioInfo * info);
#define POW2(x) (x)*(x)
-/* Element class */
-static void
-gst_audio_wsincband_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Band pass & band reject filter", "Filter/Effect/Audio",
- "Band pass and band reject windowed sinc filter",
- "Thomas Vander Stichele <thomas at apestaart dot org>, "
- "Steven W. Smith, "
- "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
static void
gst_audio_wsincband_class_init (GstAudioWSincBandClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_gst_audio_wsincband_debug, "audiowsincband", 0,
+ "Band-pass and Band-reject Windowed sinc filter plugin");
+
gobject_class->set_property = gst_audio_wsincband_set_property;
gobject_class->get_property = gst_audio_wsincband_get_property;
gobject_class->finalize = gst_audio_wsincband_finalize;
WINDOW_HAMMING,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Band pass & band reject filter", "Filter/Effect/Audio",
+ "Band pass and band reject windowed sinc filter",
+ "Thomas Vander Stichele <thomas at apestaart dot org>, "
+ "Steven W. Smith, "
+ "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_wsincband_setup);
}
static void
-gst_audio_wsincband_init (GstAudioWSincBand * self,
- GstAudioWSincBandClass * g_class)
+gst_audio_wsincband_init (GstAudioWSincBand * self)
{
self->kernel_length = 101;
self->lower_frequency = 0.0;
self->mode = MODE_BAND_PASS;
self->window = WINDOW_HAMMING;
- self->lock = g_mutex_new ();
+ g_mutex_init (&self->lock);
}
static void
gdouble *kernel_lp, *kernel_hp;
gdouble w;
gdouble *kernel;
+ gint rate, channels;
len = self->kernel_length;
- if (GST_AUDIO_FILTER (self)->format.rate == 0) {
+ rate = GST_AUDIO_FILTER_RATE (self);
+ channels = GST_AUDIO_FILTER_CHANNELS (self);
+
+ if (rate == 0) {
GST_DEBUG ("rate not set yet");
return;
}
- if (GST_AUDIO_FILTER (self)->format.channels == 0) {
+ if (channels == 0) {
GST_DEBUG ("channels not set yet");
return;
}
/* Clamp frequencies */
- self->lower_frequency =
- CLAMP (self->lower_frequency, 0.0,
- GST_AUDIO_FILTER (self)->format.rate / 2);
- self->upper_frequency =
- CLAMP (self->upper_frequency, 0.0,
- GST_AUDIO_FILTER (self)->format.rate / 2);
+ self->lower_frequency = CLAMP (self->lower_frequency, 0.0, rate / 2);
+ self->upper_frequency = CLAMP (self->upper_frequency, 0.0, rate / 2);
+
if (self->lower_frequency > self->upper_frequency) {
gint tmp = self->lower_frequency;
(self->mode == MODE_BAND_PASS) ? "band-pass" : "band-reject");
/* fill the lp kernel */
- w = 2 * G_PI * (self->lower_frequency / GST_AUDIO_FILTER (self)->format.rate);
+ w = 2 * G_PI * (self->lower_frequency / rate);
kernel_lp = g_new (gdouble, len);
for (i = 0; i < len; ++i) {
if (i == (len - 1) / 2.0)
kernel_lp[i] /= sum;
/* fill the hp kernel */
- w = 2 * G_PI * (self->upper_frequency / GST_AUDIO_FILTER (self)->format.rate);
+ w = 2 * G_PI * (self->upper_frequency / rate);
kernel_hp = g_new (gdouble, len);
for (i = 0; i < len; ++i) {
if (i == (len - 1) / 2.0)
/* get notified of caps and plug in the correct process function */
static gboolean
-gst_audio_wsincband_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_wsincband_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioWSincBand *self = GST_AUDIO_WSINC_BAND (base);
gst_audio_wsincband_build_kernel (self);
- return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, format);
+ return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
}
static void
{
GstAudioWSincBand *self = GST_AUDIO_WSINC_BAND (object);
- g_mutex_free (self->lock);
- self->lock = NULL;
+ g_mutex_clear (&self->lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
case PROP_LENGTH:{
gint val;
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
val = g_value_get_int (value);
if (val % 2 == 0)
val++;
self->kernel_length = val;
gst_audio_wsincband_build_kernel (self);
}
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
}
case PROP_LOWER_FREQUENCY:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
self->lower_frequency = g_value_get_float (value);
gst_audio_wsincband_build_kernel (self);
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
case PROP_UPPER_FREQUENCY:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
self->upper_frequency = g_value_get_float (value);
gst_audio_wsincband_build_kernel (self);
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
case PROP_MODE:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
self->mode = g_value_get_enum (value);
gst_audio_wsincband_build_kernel (self);
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
case PROP_WINDOW:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
self->window = g_value_get_enum (value);
gst_audio_wsincband_build_kernel (self);
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* -*- c-basic-offset: 2 -*-
- *
+ *
* GStreamer
* Copyright (C) 1999-2001 Erik Walthinsen <omega@cse.ogi.edu>
* 2006 Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
- *
- *
+ *
+ *
* this windowed sinc filter is taken from the freely downloadable DSP book,
* "The Scientist and Engineer's Guide to Digital Signal Processing",
* chapter 16
gint kernel_length; /* length of the filter kernel */
/* < private > */
- GMutex *lock;
+ GMutex lock;
};
struct _GstAudioWSincBandClass {
#include <math.h>
#include <gst/gst.h>
#include <gst/audio/gstaudiofilter.h>
-#include <gst/controller/gstcontroller.h>
#include "audiowsinclimit.h"
return gtype;
}
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_wsinclimit_debug, "audiowsinclimit", 0, \
- "Low-pass and High-pass Windowed sinc filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioWSincLimit, gst_audio_wsinclimit, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_FIR_FILTER, DEBUG_INIT);
+#define gst_audio_wsinclimit_parent_class parent_class
+G_DEFINE_TYPE (GstAudioWSincLimit, gst_audio_wsinclimit,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
static void gst_audio_wsinclimit_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_audio_wsinclimit_finalize (GObject * object);
static gboolean gst_audio_wsinclimit_setup (GstAudioFilter * base,
- GstRingBufferSpec * format);
+ const GstAudioInfo * info);
#define POW2(x) (x)*(x)
-/* Element class */
-
-static void
-gst_audio_wsinclimit_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Low pass & high pass filter", "Filter/Effect/Audio",
- "Low pass and high pass windowed sinc filter",
- "Thomas Vander Stichele <thomas at apestaart dot org>, "
- "Steven W. Smith, "
- "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
static void
gst_audio_wsinclimit_class_init (GstAudioWSincLimitClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_wsinclimit_debug, "audiowsinclimit", 0,
+ "Low-pass and High-pass Windowed sinc filter plugin");
+
gobject_class->set_property = gst_audio_wsinclimit_set_property;
gobject_class->get_property = gst_audio_wsinclimit_get_property;
gobject_class->finalize = gst_audio_wsinclimit_finalize;
WINDOW_HAMMING,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Low pass & high pass filter", "Filter/Effect/Audio",
+ "Low pass and high pass windowed sinc filter",
+ "Thomas Vander Stichele <thomas at apestaart dot org>, "
+ "Steven W. Smith, "
+ "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_wsinclimit_setup);
}
static void
-gst_audio_wsinclimit_init (GstAudioWSincLimit * self,
- GstAudioWSincLimitClass * g_class)
+gst_audio_wsinclimit_init (GstAudioWSincLimit * self)
{
self->mode = MODE_LOW_PASS;
self->window = WINDOW_HAMMING;
self->kernel_length = 101;
self->cutoff = 0.0;
- self->lock = g_mutex_new ();
+ g_mutex_init (&self->lock);
}
static void
gint len = 0;
gdouble w;
gdouble *kernel = NULL;
+ gint rate, channels;
len = self->kernel_length;
- if (GST_AUDIO_FILTER (self)->format.rate == 0) {
+ rate = GST_AUDIO_FILTER_RATE (self);
+ channels = GST_AUDIO_FILTER_CHANNELS (self);
+
+ if (rate == 0) {
GST_DEBUG ("rate not set yet");
return;
}
- if (GST_AUDIO_FILTER (self)->format.channels == 0) {
+ if (channels == 0) {
GST_DEBUG ("channels not set yet");
return;
}
/* Clamp cutoff frequency between 0 and the nyquist frequency */
- self->cutoff =
- CLAMP (self->cutoff, 0.0, GST_AUDIO_FILTER (self)->format.rate / 2);
+ self->cutoff = CLAMP (self->cutoff, 0.0, rate / 2);
GST_DEBUG ("gst_audio_wsinclimit_: initializing filter kernel of length %d "
"with cutoff %.2lf Hz "
(self->mode == MODE_LOW_PASS) ? "low-pass" : "high-pass");
/* fill the kernel */
- w = 2 * G_PI * (self->cutoff / GST_AUDIO_FILTER (self)->format.rate);
+ w = 2 * G_PI * (self->cutoff / rate);
kernel = g_new (gdouble, len);
/* get notified of caps and plug in the correct process function */
static gboolean
-gst_audio_wsinclimit_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_audio_wsinclimit_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstAudioWSincLimit *self = GST_AUDIO_WSINC_LIMIT (base);
gst_audio_wsinclimit_build_kernel (self);
- return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, format);
+ return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
}
static void
{
GstAudioWSincLimit *self = GST_AUDIO_WSINC_LIMIT (object);
- g_mutex_free (self->lock);
- self->lock = NULL;
+ g_mutex_clear (&self->lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
case PROP_LENGTH:{
gint val;
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
val = g_value_get_int (value);
if (val % 2 == 0)
val++;
self->kernel_length = val;
gst_audio_wsinclimit_build_kernel (self);
}
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
}
case PROP_FREQUENCY:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
self->cutoff = g_value_get_float (value);
gst_audio_wsinclimit_build_kernel (self);
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
case PROP_MODE:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
self->mode = g_value_get_enum (value);
gst_audio_wsinclimit_build_kernel (self);
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
case PROP_WINDOW:
- g_mutex_lock (self->lock);
+ g_mutex_lock (&self->lock);
self->window = g_value_get_enum (value);
gst_audio_wsinclimit_build_kernel (self);
- g_mutex_unlock (self->lock);
+ g_mutex_unlock (&self->lock);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
gint kernel_length;
/* < private > */
- GMutex *lock;
+ GMutex lock;
};
struct _GstAudioWSincLimitClass {
static gboolean gst_aac_parse_sink_setcaps (GstBaseParse * parse,
GstCaps * caps);
-static GstCaps *gst_aac_parse_sink_getcaps (GstBaseParse * parse);
+static GstCaps *gst_aac_parse_sink_getcaps (GstBaseParse * parse,
+ GstCaps * filter);
-static gboolean gst_aac_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * size, gint * skipsize);
+static GstFlowReturn gst_aac_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
-static GstFlowReturn gst_aac_parse_parse_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame);
-
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT (aacparse_debug, "aacparse", 0, \
- "AAC audio stream parser");
-
-GST_BOILERPLATE_FULL (GstAacParse, gst_aac_parse, GstBaseParse,
- GST_TYPE_BASE_PARSE, _do_init);
+G_DEFINE_TYPE (GstAacParse, gst_aac_parse, GST_TYPE_BASE_PARSE);
static inline gint
gst_aac_parse_get_sample_rate_from_index (guint sr_idx)
}
/**
- * gst_aac_parse_base_init:
- * @klass: #GstElementClass.
+ * gst_aac_parse_class_init:
+ * @klass: #GstAacParseClass.
*
*/
static void
-gst_aac_parse_base_init (gpointer klass)
+gst_aac_parse_class_init (GstAacParseClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (aacparse_debug, "aacparse", 0,
+ "AAC audio stream parser");
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
gst_element_class_set_details_simple (element_class,
"AAC audio stream parser", "Codec/Parser/Audio",
"Advanced Audio Coding parser", "Stefan Kost <stefan.kost@nokia.com>");
-}
-
-
-/**
- * gst_aac_parse_class_init:
- * @klass: #GstAacParseClass.
- *
- */
-static void
-gst_aac_parse_class_init (GstAacParseClass * klass)
-{
- GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
parse_class->start = GST_DEBUG_FUNCPTR (gst_aac_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_aac_parse_stop);
parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_aac_parse_sink_setcaps);
parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_aac_parse_sink_getcaps);
- parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_aac_parse_parse_frame);
- parse_class->check_valid_frame =
- GST_DEBUG_FUNCPTR (gst_aac_parse_check_valid_frame);
+ parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_aac_parse_handle_frame);
}
*
*/
static void
-gst_aac_parse_init (GstAacParse * aacparse, GstAacParseClass * klass)
+gst_aac_parse_init (GstAacParse * aacparse)
{
GST_DEBUG ("initialized");
}
if (sink_caps)
src_caps = gst_caps_copy (sink_caps);
else
- src_caps = gst_caps_new_simple ("audio/mpeg", NULL);
+ src_caps = gst_caps_new_empty_simple ("audio/mpeg");
gst_caps_set_simple (src_caps, "framed", G_TYPE_BOOLEAN, TRUE,
"mpegversion", G_TYPE_INT, aacparse->mpegversion, NULL);
GstBuffer *buf = gst_value_get_buffer (value);
if (buf) {
- const guint8 *buffer = GST_BUFFER_DATA (buf);
+ GstMapInfo map;
guint sr_idx;
- sr_idx = ((buffer[0] & 0x07) << 1) | ((buffer[1] & 0x80) >> 7);
- aacparse->object_type = (buffer[0] & 0xf8) >> 3;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ sr_idx = ((map.data[0] & 0x07) << 1) | ((map.data[1] & 0x80) >> 7);
+ aacparse->object_type = (map.data[0] & 0xf8) >> 3;
aacparse->sample_rate = gst_aac_parse_get_sample_rate_from_index (sr_idx);
- aacparse->channels = (buffer[1] & 0x78) >> 3;
+ aacparse->channels = (map.data[1] & 0x78) >> 3;
aacparse->header_type = DSPAAC_HEADER_NONE;
aacparse->mpegversion = 4;
- aacparse->frame_samples = (buffer[1] & 4) ? 960 : 1024;
+ aacparse->frame_samples = (map.data[1] & 4) ? 960 : 1024;
+ gst_buffer_unmap (buf, &map);
GST_DEBUG ("codec_data: object_type=%d, sample_rate=%d, channels=%d, "
"samples=%d", aacparse->object_type, aacparse->sample_rate,
int skip_size = 0;
int bitstream_type;
int sr_idx;
+ GstCaps *sinkcaps;
aacparse->header_type = DSPAAC_HEADER_ADIF;
aacparse->mpegversion = 4;
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse), 512);
/* arrange for metadata and get out of the way */
- gst_aac_parse_set_src_caps (aacparse,
- GST_PAD_CAPS (GST_BASE_PARSE_SINK_PAD (aacparse)));
+ sinkcaps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (aacparse));
+ gst_aac_parse_set_src_caps (aacparse, sinkcaps);
+ if (sinkcaps)
+ gst_caps_unref (sinkcaps);
/* not syncable, not easily seekable (unless we push data from start */
gst_base_parse_set_syncable (GST_BASE_PARSE_CAST (aacparse), FALSE);
/**
* gst_aac_parse_check_valid_frame:
* @parse: #GstBaseParse.
- * @buffer: #GstBuffer.
- * @framesize: If the buffer contains a valid frame, its size will be put here
+ * @frame: #GstBaseParseFrame.
* @skipsize: How much data parent class should skip in order to find the
* frame header.
*
- * Implementation of "check_valid_frame" vmethod in #GstBaseParse class.
+ * Implementation of "handle_frame" vmethod in #GstBaseParse class.
+ *
+ * Also determines frame overhead.
+ * ADTS streams have a 7 byte header in each frame. MP4 and ADIF streams don't have
+ * a per-frame header. LOAS has 3 bytes.
+ *
+ * We're making a couple of simplifying assumptions:
+ *
+ * 1. We count Program Configuration Elements rather than searching for them
+ * in the streams to discount them - the overhead is negligible.
*
- * Returns: TRUE if buffer contains a valid frame.
+ * 2. We ignore CRC. This has a worst-case impact of (num_raw_blocks + 1)*16
+ * bits, which should still not be significant enough to warrant the
+ * additional parsing through the headers
+ *
+ * Returns: a #GstFlowReturn.
*/
-static gboolean
-gst_aac_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
+static GstFlowReturn
+gst_aac_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
{
- const guint8 *data;
+ GstMapInfo map;
GstAacParse *aacparse;
gboolean ret = FALSE;
gboolean lost_sync;
GstBuffer *buffer;
+ guint framesize;
+ gint rate, channels;
aacparse = GST_AAC_PARSE (parse);
buffer = frame->buffer;
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ *skipsize = -1;
lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);
if (aacparse->header_type == DSPAAC_HEADER_ADIF ||
aacparse->header_type == DSPAAC_HEADER_NONE) {
/* There is nothing to parse */
- *framesize = GST_BUFFER_SIZE (buffer);
+ framesize = map.size;
ret = TRUE;
} else if (aacparse->header_type == DSPAAC_HEADER_NOT_PARSED || lost_sync) {
- ret = gst_aac_parse_detect_stream (aacparse, data, GST_BUFFER_SIZE (buffer),
- GST_BASE_PARSE_DRAINING (parse), framesize, skipsize);
+ ret = gst_aac_parse_detect_stream (aacparse, map.data, map.size,
+ GST_BASE_PARSE_DRAINING (parse), &framesize, skipsize);
} else if (aacparse->header_type == DSPAAC_HEADER_ADTS) {
guint needed_data = 1024;
- ret = gst_aac_parse_check_adts_frame (aacparse, data,
- GST_BUFFER_SIZE (buffer), GST_BASE_PARSE_DRAINING (parse),
- framesize, &needed_data);
+ ret = gst_aac_parse_check_adts_frame (aacparse, map.data, map.size,
+ GST_BASE_PARSE_DRAINING (parse), &framesize, &needed_data);
if (!ret) {
GST_DEBUG ("buffer didn't contain valid frame");
} else if (aacparse->header_type == DSPAAC_HEADER_LOAS) {
guint needed_data = 1024;
- ret = gst_aac_parse_check_loas_frame (aacparse, data,
- GST_BUFFER_SIZE (buffer), GST_BASE_PARSE_DRAINING (parse),
- framesize, &needed_data);
+ ret = gst_aac_parse_check_loas_frame (aacparse, map.data,
+ map.size, GST_BASE_PARSE_DRAINING (parse), &framesize, &needed_data);
if (!ret) {
GST_DEBUG ("buffer didn't contain valid frame");
ADTS_MAX_SIZE);
}
- return ret;
-}
-
-
-/**
- * gst_aac_parse_parse_frame:
- * @parse: #GstBaseParse.
- * @buffer: #GstBuffer.
- *
- * Implementation of "parse_frame" vmethod in #GstBaseParse class.
- *
- * Also determines frame overhead.
- * ADTS streams have a 7 byte header in each frame. MP4 and ADIF streams don't have
- * a per-frame header. LOAS has 3 bytes.
- *
- * We're making a couple of simplifying assumptions:
- *
- * 1. We count Program Configuration Elements rather than searching for them
- * in the streams to discount them - the overhead is negligible.
- *
- * 2. We ignore CRC. This has a worst-case impact of (num_raw_blocks + 1)*16
- * bits, which should still not be significant enough to warrant the
- * additional parsing through the headers
- *
- * Returns: GST_FLOW_OK if frame was successfully parsed and can be pushed
- * forward. Otherwise appropriate error is returned.
- */
-static GstFlowReturn
-gst_aac_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
-{
- GstAacParse *aacparse;
- GstBuffer *buffer;
- GstFlowReturn ret = GST_FLOW_OK;
- gint rate, channels;
-
- aacparse = GST_AAC_PARSE (parse);
- buffer = frame->buffer;
+ if (G_UNLIKELY (!ret))
+ goto exit;
if (aacparse->header_type == DSPAAC_HEADER_ADTS) {
/* see above */
frame->overhead = 7;
- gst_aac_parse_parse_adts_header (aacparse, GST_BUFFER_DATA (buffer),
+ gst_aac_parse_parse_adts_header (aacparse, map.data,
&rate, &channels, NULL, NULL);
+
GST_LOG_OBJECT (aacparse, "rate: %d, chans: %d", rate, channels);
if (G_UNLIKELY (rate != aacparse->sample_rate
aacparse->sample_rate = rate;
aacparse->channels = channels;
- if (!gst_aac_parse_set_src_caps (aacparse,
- GST_PAD_CAPS (GST_BASE_PARSE (aacparse)->sinkpad))) {
+ GST_DEBUG_OBJECT (aacparse, "here");
+
+ if (!gst_aac_parse_set_src_caps (aacparse, NULL)) {
/* If linking fails, we need to return appropriate error */
ret = GST_FLOW_NOT_LINKED;
}
/* see above */
frame->overhead = 3;
- if (!gst_aac_parse_read_loas_config (aacparse, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer), &rate, &channels, NULL)) {
+ if (!gst_aac_parse_read_loas_config (aacparse, map.data, map.size, &rate,
+ &channels, NULL)) {
GST_WARNING_OBJECT (aacparse, "Error reading LOAS config");
} else if (G_UNLIKELY (rate != aacparse->sample_rate
|| channels != aacparse->channels)) {
/* We want to set caps both at start, and when rate/channels change.
Since only some LOAS frames have that info, we may receive frames
before knowing about rate/channels. */
- if (setcaps || !GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (aacparse))) {
- if (!gst_aac_parse_set_src_caps (aacparse,
- GST_PAD_CAPS (GST_BASE_PARSE (aacparse)->sinkpad))) {
+ if (setcaps
+ || !gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (aacparse))) {
+ if (!gst_aac_parse_set_src_caps (aacparse, NULL)) {
/* If linking fails, we need to return appropriate error */
ret = GST_FLOW_NOT_LINKED;
}
}
}
- return ret;
+exit:
+ gst_buffer_unmap (buffer, &map);
+
+ if (ret) {
+ /* found, skip if needed */
+ if (*skipsize > 0)
+ return GST_FLOW_OK;
+ *skipsize = 0;
+ } else {
+ if (*skipsize < 0)
+ *skipsize = 1;
+ }
+
+ if (ret && framesize <= map.size) {
+ return gst_base_parse_finish_frame (parse, frame, framesize);
+ }
+
+ return GST_FLOW_OK;
}
}
static GstCaps *
-gst_aac_parse_sink_getcaps (GstBaseParse * parse)
+gst_aac_parse_sink_getcaps (GstBaseParse * parse, GstCaps * filter)
{
GstCaps *peercaps;
GstCaps *res;
+ /* FIXME: handle filter caps */
+
peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
if (peercaps) {
guint i, n;
static gboolean gst_ac3_parse_start (GstBaseParse * parse);
static gboolean gst_ac3_parse_stop (GstBaseParse * parse);
-static gboolean gst_ac3_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * size, gint * skipsize);
-static GstFlowReturn gst_ac3_parse_parse_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame);
+static GstFlowReturn gst_ac3_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
static gboolean gst_ac3_parse_src_event (GstBaseParse * parse,
GstEvent * event);
-static GstCaps *gst_ac3_parse_get_sink_caps (GstBaseParse * parse);
+static GstCaps *gst_ac3_parse_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
-GST_BOILERPLATE (GstAc3Parse, gst_ac3_parse, GstBaseParse, GST_TYPE_BASE_PARSE);
-
-static void
-gst_ac3_parse_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
-
- gst_element_class_set_details_simple (element_class,
- "AC3 audio stream parser", "Codec/Parser/Converter/Audio",
- "AC3 parser", "Tim-Philipp Müller <tim centricular net>");
-}
+#define gst_ac3_parse_parent_class parent_class
+G_DEFINE_TYPE (GstAc3Parse, gst_ac3_parse, GST_TYPE_BASE_PARSE);
static void
gst_ac3_parse_class_init (GstAc3ParseClass * klass)
{
- GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
GObjectClass *object_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
GST_DEBUG_CATEGORY_INIT (ac3_parse_debug, "ac3parse", 0,
"AC3 audio stream parser");
object_class->finalize = gst_ac3_parse_finalize;
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (element_class,
+ "AC3 audio stream parser", "Codec/Parser/Converter/Audio",
+ "AC3 parser", "Tim-Philipp Müller <tim centricular net>");
+
parse_class->start = GST_DEBUG_FUNCPTR (gst_ac3_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_ac3_parse_stop);
- parse_class->check_valid_frame =
- GST_DEBUG_FUNCPTR (gst_ac3_parse_check_valid_frame);
- parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_ac3_parse_parse_frame);
+ parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_ac3_parse_handle_frame);
parse_class->src_event = GST_DEBUG_FUNCPTR (gst_ac3_parse_src_event);
parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_ac3_parse_get_sink_caps);
}
}
static void
-gst_ac3_parse_init (GstAc3Parse * ac3parse, GstAc3ParseClass * klass)
+gst_ac3_parse_init (GstAc3Parse * ac3parse)
{
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (ac3parse), 6);
gst_ac3_parse_reset (ac3parse);
gint skip, guint * frame_size, guint * rate, guint * chans, guint * blks,
guint * sid)
{
- GstBitReader bits = GST_BIT_READER_INIT_FROM_BUFFER (buf);
+ GstBitReader bits;
+ GstMapInfo map;
guint8 fscod, frmsizcod, bsid, acmod, lfe_on, rate_scale;
+ gboolean ret = FALSE;
GST_LOG_OBJECT (ac3parse, "parsing ac3");
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_bit_reader_init (&bits, map.data, map.size);
gst_bit_reader_skip_unchecked (&bits, skip * 8);
gst_bit_reader_skip_unchecked (&bits, 16 + 16);
if (G_UNLIKELY (fscod == 3 || frmsizcod >= G_N_ELEMENTS (frmsizcod_table))) {
GST_DEBUG_OBJECT (ac3parse, "bad fscod=%d frmsizcod=%d", fscod, frmsizcod);
- return FALSE;
+ goto cleanup;
}
bsid = gst_bit_reader_get_bits_uint8_unchecked (&bits, 5);
format, so let them through. The spec says nothing about 9 and 10 */
if (bsid > 10) {
GST_DEBUG_OBJECT (ac3parse, "unexpected bsid=%d", bsid);
- return FALSE;
+ goto cleanup;
} else if (bsid != 8 && bsid != 6) {
GST_DEBUG_OBJECT (ac3parse, "undefined bsid=%d", bsid);
}
if (sid)
*sid = 0;
- return TRUE;
+ ret = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+
+ return ret;
}
static gboolean
gint skip, guint * frame_size, guint * rate, guint * chans, guint * blks,
guint * sid)
{
- GstBitReader bits = GST_BIT_READER_INIT_FROM_BUFFER (buf);
+ GstBitReader bits;
+ GstMapInfo map;
guint16 frmsiz, sample_rate, blocks;
guint8 strmtyp, fscod, fscod2, acmod, lfe_on, strmid, numblkscod;
+ gboolean ret = FALSE;
GST_LOG_OBJECT (ac3parse, "parsing e-ac3");
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_bit_reader_init (&bits, map.data, map.size);
gst_bit_reader_skip_unchecked (&bits, skip * 8);
gst_bit_reader_skip_unchecked (&bits, 16);
strmtyp = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2); /* strmtyp */
if (G_UNLIKELY (strmtyp == 3)) {
GST_DEBUG_OBJECT (ac3parse, "bad strmtyp %d", strmtyp);
- return FALSE;
+ goto cleanup;
}
strmid = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3); /* substreamid */
fscod2 = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2); /* fscod2 */
if (G_UNLIKELY (fscod2 == 3)) {
GST_DEBUG_OBJECT (ac3parse, "invalid fscod2");
- return FALSE;
+ goto cleanup;
}
sample_rate = fscod_rates[fscod2] / 2;
blocks = 6;
if (sid)
*sid = (strmtyp & 0x1) << 3 | strmid;
- return TRUE;
+ ret = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+
+ return ret;
}
static gboolean
guint * framesize, guint * rate, guint * chans, guint * blocks,
guint * sid, gboolean * eac)
{
- GstBitReader bits = GST_BIT_READER_INIT_FROM_BUFFER (buf);
+ GstBitReader bits;
guint16 sync;
guint8 bsid;
+ GstMapInfo map;
+ gboolean ret = FALSE;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_bit_reader_init (&bits, map.data, map.size);
- GST_MEMDUMP_OBJECT (parse, "AC3 frame sync", GST_BUFFER_DATA (buf), 16);
+ GST_MEMDUMP_OBJECT (parse, "AC3 frame sync", map.data, MIN (map.size, 16));
gst_bit_reader_skip_unchecked (&bits, skip * 8);
bsid = gst_bit_reader_peek_bits_uint8_unchecked (&bits, 5);
if (G_UNLIKELY (sync != 0x0b77))
- return FALSE;
+ goto cleanup;
GST_LOG_OBJECT (parse, "bsid = %d", bsid);
if (bsid <= 10) {
if (eac)
*eac = FALSE;
- return gst_ac3_parse_frame_header_ac3 (parse, buf, skip, framesize, rate,
+ ret = gst_ac3_parse_frame_header_ac3 (parse, buf, skip, framesize, rate,
chans, blocks, sid);
+ goto cleanup;
} else if (bsid <= 16) {
if (eac)
*eac = TRUE;
- return gst_ac3_parse_frame_header_eac3 (parse, buf, skip, framesize, rate,
+ ret = gst_ac3_parse_frame_header_eac3 (parse, buf, skip, framesize, rate,
chans, blocks, sid);
+ goto cleanup;
} else {
GST_DEBUG_OBJECT (parse, "unexpected bsid %d", bsid);
return FALSE;
}
+
+ GST_DEBUG_OBJECT (parse, "unexpected bsid %d", bsid);
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+
+ return ret;
}
-static gboolean
-gst_ac3_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
+static GstFlowReturn
+gst_ac3_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
{
GstAc3Parse *ac3parse = GST_AC3_PARSE (parse);
GstBuffer *buf = frame->buffer;
- GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buf);
+ GstByteReader reader;
gint off;
gboolean lost_sync, draining, eac, more = FALSE;
guint frmsiz, blocks, sid;
+ guint rate, chans;
+ gboolean update_rate = FALSE;
+ gint framesize = 0;
gint have_blocks = 0;
+ GstMapInfo map;
+ gboolean ret = FALSE;
+ GstFlowReturn res = GST_FLOW_OK;
- if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < 6))
- return FALSE;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (G_UNLIKELY (map.size < 6)) {
+ *skipsize = 1;
+ goto cleanup;
+ }
+
+ gst_byte_reader_init (&reader, map.data, map.size);
off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffff0000, 0x0b770000,
- 0, GST_BUFFER_SIZE (buf));
+ 0, map.size);
GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);
/* didn't find anything that looks like a sync word, skip */
if (off < 0) {
- *skipsize = GST_BUFFER_SIZE (buf) - 3;
- return FALSE;
+ *skipsize = map.size - 3;
+ goto cleanup;
}
/* possible frame header, but not at offset 0? skip bytes before sync */
if (off > 0) {
*skipsize = off;
- return FALSE;
+ goto cleanup;
}
/* make sure the values in the frame header look sane */
- if (!gst_ac3_parse_frame_header (ac3parse, buf, 0, &frmsiz, NULL, NULL,
+ if (!gst_ac3_parse_frame_header (ac3parse, buf, 0, &frmsiz, &rate, &chans,
&blocks, &sid, &eac)) {
*skipsize = off + 2;
- return FALSE;
+ goto cleanup;
}
- *framesize = frmsiz;
+ GST_LOG_OBJECT (parse, "size: %u, blocks: %u, rate: %u, chans: %u", frmsiz,
+ blocks, rate, chans);
+
+ framesize = frmsiz;
if (G_UNLIKELY (g_atomic_int_get (&ac3parse->align) ==
GST_AC3_PARSE_ALIGN_NONE))
/* We need the first substream to be the one with id 0 */
GST_LOG_OBJECT (ac3parse, "Skipping till we find sid 0");
*skipsize = off + 2;
- return FALSE;
+ goto cleanup;
}
- *framesize = 0;
+ framesize = 0;
/* Loop till we have 6 blocks per substream */
for (have_blocks = 0; !more && have_blocks < 6; have_blocks += blocks) {
/* Loop till we get one frame from each substream */
do {
- *framesize += frmsiz;
+ framesize += frmsiz;
- if (!gst_byte_reader_skip (&reader, frmsiz) ||
- GST_BUFFER_SIZE (buf) < (*framesize + 6)) {
+ if (!gst_byte_reader_skip (&reader, frmsiz)
+ || map.size < (framesize + 6)) {
more = TRUE;
break;
}
- if (!gst_ac3_parse_frame_header (ac3parse, buf, *framesize, &frmsiz,
+ if (!gst_ac3_parse_frame_header (ac3parse, buf, framesize, &frmsiz,
NULL, NULL, NULL, &sid, &eac)) {
*skipsize = off + 2;
- return FALSE;
+ goto cleanup;
}
} while (sid);
}
if (more || !gst_byte_reader_skip (&reader, frmsiz) ||
!gst_byte_reader_get_uint16_be (&reader, &word)) {
GST_DEBUG_OBJECT (ac3parse, "... but not sufficient data");
- gst_base_parse_set_min_frame_size (parse, *framesize + 6);
+ gst_base_parse_set_min_frame_size (parse, framesize + 6);
*skipsize = 0;
- return FALSE;
+ goto cleanup;
} else {
if (word != 0x0b77) {
GST_DEBUG_OBJECT (ac3parse, "0x%x not OK", word);
*skipsize = off + 2;
- return FALSE;
+ goto cleanup;
} else {
/* ok, got sync now, let's assume constant frame size */
- gst_base_parse_set_min_frame_size (parse, *framesize);
+ gst_base_parse_set_min_frame_size (parse, framesize);
}
}
}
- return TRUE;
-}
-
-static GstFlowReturn
-gst_ac3_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
-{
- GstAc3Parse *ac3parse = GST_AC3_PARSE (parse);
- GstBuffer *buf = frame->buffer;
- guint fsize, rate, chans, blocks, sid;
- gboolean eac, update_rate = FALSE;
-
- if (!gst_ac3_parse_frame_header (ac3parse, buf, 0, &fsize, &rate, &chans,
- &blocks, &sid, &eac))
- goto broken_header;
-
- GST_LOG_OBJECT (parse, "size: %u, blocks: %u, rate: %u, chans: %u", fsize,
- blocks, rate, chans);
+ /* expect to have found a frame here */
+ g_assert (framesize);
+ ret = TRUE;
+ /* arrange for metadata setup */
if (G_UNLIKELY (sid)) {
/* dependent frame, no need to (ac)count for or consider further */
GST_LOG_OBJECT (parse, "sid: %d", sid);
/* occupies same time space as previous base frame */
if (G_LIKELY (GST_BUFFER_TIMESTAMP (buf) >= GST_BUFFER_DURATION (buf)))
GST_BUFFER_TIMESTAMP (buf) -= GST_BUFFER_DURATION (buf);
- /* only return if we already arranged for caps */
+ /* only shortcut if we already arranged for caps */
if (G_LIKELY (ac3parse->sample_rate > 0))
- return GST_FLOW_OK;
+ goto cleanup;
}
if (G_UNLIKELY (ac3parse->sample_rate != rate || ac3parse->channels != chans
gst_caps_set_simple (caps, "alignment", G_TYPE_STRING,
g_atomic_int_get (&ac3parse->align) == GST_AC3_PARSE_ALIGN_IEC61937 ?
"iec61937" : "frame", NULL);
- gst_buffer_set_caps (buf, caps);
gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
gst_caps_unref (caps);
if (G_UNLIKELY (update_rate))
gst_base_parse_set_frame_rate (parse, rate, 256 * blocks, 2, 2);
- return GST_FLOW_OK;
+cleanup:
+ gst_buffer_unmap (buf, &map);
-/* ERRORS */
-broken_header:
- {
- /* this really shouldn't ever happen */
- GST_ELEMENT_ERROR (parse, STREAM, DECODE, (NULL), (NULL));
- return GST_FLOW_ERROR;
+ if (ret && framesize <= map.size) {
+ res = gst_base_parse_finish_frame (parse, frame, framesize);
}
+
+ return res;
}
static gboolean
}
static GstCaps *
-gst_ac3_parse_get_sink_caps (GstBaseParse * parse)
+gst_ac3_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
{
GstCaps *peercaps;
GstCaps *res;
+ /* FIXME: handle filter */
+
peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
if (peercaps) {
guint i, n;
static gboolean gst_amr_parse_sink_setcaps (GstBaseParse * parse,
GstCaps * caps);
-static GstCaps *gst_amr_parse_sink_getcaps (GstBaseParse * parse);
+static GstCaps *gst_amr_parse_sink_getcaps (GstBaseParse * parse,
+ GstCaps * filter);
-static gboolean gst_amr_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * framesize, gint * skipsize);
+static GstFlowReturn gst_amr_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
-static GstFlowReturn gst_amr_parse_parse_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame);
-
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT (amrparse_debug, "amrparse", 0, \
- "AMR-NB audio stream parser");
-
-GST_BOILERPLATE_FULL (GstAmrParse, gst_amr_parse, GstBaseParse,
- GST_TYPE_BASE_PARSE, _do_init);
+G_DEFINE_TYPE (GstAmrParse, gst_amr_parse, GST_TYPE_BASE_PARSE);
/**
- * gst_amr_parse_base_init:
- * @klass: #GstElementClass.
+ * gst_amr_parse_class_init:
+ * @klass: GstAmrParseClass.
*
*/
static void
-gst_amr_parse_base_init (gpointer klass)
+gst_amr_parse_class_init (GstAmrParseClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (amrparse_debug, "amrparse", 0,
+ "AMR-NB audio stream parser");
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
gst_element_class_set_details_simple (element_class,
"AMR audio stream parser", "Codec/Parser/Audio",
"Adaptive Multi-Rate audio parser",
"Ronald Bultje <rbultje@ronald.bitfreak.net>");
-}
-
-
-/**
- * gst_amr_parse_class_init:
- * @klass: GstAmrParseClass.
- *
- */
-static void
-gst_amr_parse_class_init (GstAmrParseClass * klass)
-{
- GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
parse_class->start = GST_DEBUG_FUNCPTR (gst_amr_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_amr_parse_stop);
parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_amr_parse_sink_setcaps);
parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_amr_parse_sink_getcaps);
- parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_amr_parse_parse_frame);
- parse_class->check_valid_frame =
- GST_DEBUG_FUNCPTR (gst_amr_parse_check_valid_frame);
+ parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_amr_parse_handle_frame);
}
*
*/
static void
-gst_amr_parse_init (GstAmrParse * amrparse, GstAmrParseClass * klass)
+gst_amr_parse_init (GstAmrParse * amrparse)
{
/* init rest */
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 62);
*
* Returns: TRUE if the given data contains valid frame.
*/
-static gboolean
-gst_amr_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
+static GstFlowReturn
+gst_amr_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
{
GstBuffer *buffer;
- const guint8 *data;
- gint fsize, mode, dsize;
+ GstMapInfo map;
+ gint fsize = 0, mode, dsize;
GstAmrParse *amrparse;
+ GstFlowReturn ret = GST_FLOW_OK;
+ gboolean found = FALSE;
amrparse = GST_AMR_PARSE (parse);
buffer = frame->buffer;
- data = GST_BUFFER_DATA (buffer);
- dsize = GST_BUFFER_SIZE (buffer);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ dsize = map.size;
GST_LOG ("buffer: %d bytes", dsize);
if (amrparse->need_header) {
if (dsize >= AMR_MIME_HEADER_SIZE &&
- gst_amr_parse_parse_header (amrparse, data, skipsize)) {
+ gst_amr_parse_parse_header (amrparse, map.data, skipsize)) {
amrparse->need_header = FALSE;
gst_base_parse_set_frame_rate (GST_BASE_PARSE (amrparse), 50, 1, 2, 2);
} else {
}
/* We return FALSE, so this frame won't get pushed forward. Instead,
the "skip" value is set, so next time we will receive a valid frame. */
- return FALSE;
+ goto done;
}
+ *skipsize = 1;
/* Does this look like a possible frame header candidate? */
- if ((data[0] & 0x83) == 0) {
+ if ((map.data[0] & 0x83) == 0) {
/* Yep. Retrieve the frame size */
- mode = (data[0] >> 3) & 0x0F;
+ mode = (map.data[0] >> 3) & 0x0F;
fsize = amrparse->block_size[mode] + 1; /* +1 for the header byte */
/* We recognize this data as a valid frame when:
* perform this check)
*/
if (fsize) {
- gboolean found = FALSE;
-
+ *skipsize = 0;
/* in sync, no further check */
if (!GST_BASE_PARSE_LOST_SYNC (parse)) {
found = TRUE;
} else if (dsize > fsize) {
/* enough data, check for next sync */
- if ((data[fsize] & 0x83) == 0)
+ if ((map.data[fsize] & 0x83) == 0)
found = TRUE;
} else if (GST_BASE_PARSE_DRAINING (parse)) {
/* not enough, but draining, so ok */
found = TRUE;
- } else {
- /* indicate we need not skip, but need more data */
- *skipsize = 0;
- *framesize = fsize + 1;
- }
- if (found) {
- *framesize = fsize;
- return TRUE;
}
}
}
- GST_LOG ("sync lost");
- return FALSE;
-}
+done:
+ gst_buffer_unmap (buffer, &map);
+ if (found && fsize <= map.size) {
+ ret = gst_base_parse_finish_frame (parse, frame, fsize);
+ }
-/**
- * gst_amr_parse_parse_frame:
- * @parse: #GstBaseParse.
- * @buffer: #GstBuffer.
- *
- * Implementation of "parse" vmethod in #GstBaseParse class.
- *
- * Returns: #GstFlowReturn defining the parsing status.
- */
-static GstFlowReturn
-gst_amr_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
-{
- return GST_FLOW_OK;
+ return ret;
}
-
/**
* gst_amr_parse_start:
* @parse: #GstBaseParse.
}
static GstCaps *
-gst_amr_parse_sink_getcaps (GstBaseParse * parse)
+gst_amr_parse_sink_getcaps (GstBaseParse * parse, GstCaps * filter)
{
GstCaps *peercaps;
GstCaps *res;
+ /* FIXME: handle filter caps */
+
peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
if (peercaps) {
guint i, n;
static gboolean gst_dca_parse_start (GstBaseParse * parse);
static gboolean gst_dca_parse_stop (GstBaseParse * parse);
-static gboolean gst_dca_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * size, gint * skipsize);
-static GstFlowReturn gst_dca_parse_parse_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame);
-static GstCaps *gst_dca_parse_get_sink_caps (GstBaseParse * parse);
+static GstFlowReturn gst_dca_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+static GstCaps *gst_dca_parse_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
-GST_BOILERPLATE (GstDcaParse, gst_dca_parse, GstBaseParse, GST_TYPE_BASE_PARSE);
-
-static void
-gst_dca_parse_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
-
- gst_element_class_set_details_simple (element_class,
- "DTS Coherent Acoustics audio stream parser", "Codec/Parser/Audio",
- "DCA parser", "Tim-Philipp Müller <tim centricular net>");
-}
+#define gst_dca_parse_parent_class parent_class
+G_DEFINE_TYPE (GstDcaParse, gst_dca_parse, GST_TYPE_BASE_PARSE);
static void
gst_dca_parse_class_init (GstDcaParseClass * klass)
{
GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GObjectClass *object_class = G_OBJECT_CLASS (klass);
GST_DEBUG_CATEGORY_INIT (dca_parse_debug, "dcaparse", 0,
parse_class->start = GST_DEBUG_FUNCPTR (gst_dca_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_dca_parse_stop);
- parse_class->check_valid_frame =
- GST_DEBUG_FUNCPTR (gst_dca_parse_check_valid_frame);
- parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_dca_parse_parse_frame);
+ parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_dca_parse_handle_frame);
parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_dca_parse_get_sink_caps);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (element_class,
+ "DTS Coherent Acoustics audio stream parser", "Codec/Parser/Audio",
+ "DCA parser", "Tim-Philipp Müller <tim centricular net>");
}
static void
}
static void
-gst_dca_parse_init (GstDcaParse * dcaparse, GstDcaParseClass * klass)
+gst_dca_parse_init (GstDcaParse * dcaparse)
{
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (dcaparse),
DCA_MIN_FRAMESIZE);
static gint
gst_dca_parse_find_sync (GstDcaParse * dcaparse, GstByteReader * reader,
- const GstBuffer * buf, guint32 * sync)
+ gsize bufsize, guint32 * sync)
{
guint32 best_sync = 0;
guint best_offset = G_MAXUINT;
/* Raw little endian */
off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0xfe7f0180,
- 0, GST_BUFFER_SIZE (buf));
+ 0, bufsize);
if (off >= 0 && off < best_offset) {
best_offset = off;
best_sync = 0xfe7f0180;
/* Raw big endian */
off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0x7ffe8001,
- 0, GST_BUFFER_SIZE (buf));
+ 0, bufsize);
if (off >= 0 && off < best_offset) {
best_offset = off;
best_sync = 0x7ffe8001;
/* 14-bit little endian */
off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0xff1f00e8,
- 0, GST_BUFFER_SIZE (buf));
+ 0, bufsize);
if (off >= 0 && off < best_offset) {
best_offset = off;
best_sync = 0xff1f00e8;
/* 14-bit big endian */
off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0x1fffe800,
- 0, GST_BUFFER_SIZE (buf));
+ 0, bufsize);
if (off >= 0 && off < best_offset) {
best_offset = off;
best_sync = 0x1fffe800;
return best_offset;
}
-static gboolean
-gst_dca_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
+static GstFlowReturn
+gst_dca_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
{
GstDcaParse *dcaparse = GST_DCA_PARSE (parse);
GstBuffer *buf = frame->buffer;
- GstByteReader r = GST_BYTE_READER_INIT_FROM_BUFFER (buf);
+ GstByteReader r;
gboolean parser_draining;
gboolean parser_in_sync;
gboolean terminator;
guint32 sync = 0;
- guint size, rate, chans, num_blocks, samples_per_block;
+ guint size, rate, chans, num_blocks, samples_per_block, depth;
+ gint block_size;
+ gint endianness;
gint off = -1;
+ GstMapInfo map;
+ GstFlowReturn ret = GST_FLOW_EOS;
- if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < 16))
- return FALSE;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ if (G_UNLIKELY (map.size < 16)) {
+ *skipsize = 1;
+ goto cleanup;
+ }
parser_in_sync = !GST_BASE_PARSE_LOST_SYNC (parse);
+ gst_byte_reader_init (&r, map.data, map.size);
+
if (G_LIKELY (parser_in_sync && dcaparse->last_sync != 0)) {
off = gst_byte_reader_masked_scan_uint32 (&r, 0xffffffff,
- dcaparse->last_sync, 0, GST_BUFFER_SIZE (buf));
+ dcaparse->last_sync, 0, map.size);
}
if (G_UNLIKELY (off < 0)) {
- off = gst_dca_parse_find_sync (dcaparse, &r, buf, &sync);
+ off = gst_dca_parse_find_sync (dcaparse, &r, map.size, &sync);
}
/* didn't find anything that looks like a sync word, skip */
if (off < 0) {
- *skipsize = GST_BUFFER_SIZE (buf) - 3;
+ *skipsize = map.size - 3;
GST_DEBUG_OBJECT (dcaparse, "no sync, skipping %d bytes", *skipsize);
- return FALSE;
+ goto cleanup;
}
GST_LOG_OBJECT (parse, "possible sync %08x at buffer offset %d", sync, off);
/* possible frame header, but not at offset 0? skip bytes before sync */
if (off > 0) {
*skipsize = off;
- return FALSE;
+ goto cleanup;
}
/* make sure the values in the frame header look sane */
- if (!gst_dca_parse_parse_header (dcaparse, &r, &size, &rate, &chans, NULL,
- NULL, &num_blocks, &samples_per_block, &terminator)) {
+ if (!gst_dca_parse_parse_header (dcaparse, &r, &size, &rate, &chans, &depth,
+ &endianness, &num_blocks, &samples_per_block, &terminator)) {
*skipsize = 4;
- return FALSE;
+ goto cleanup;
}
GST_LOG_OBJECT (parse, "got frame, sync %08x, size %u, rate %d, channels %d",
sync, size, rate, chans);
- *framesize = size;
-
dcaparse->last_sync = sync;
parser_draining = GST_BASE_PARSE_DRAINING (parse);
if (!parser_in_sync && !parser_draining) {
/* check for second frame to be sure */
GST_DEBUG_OBJECT (dcaparse, "resyncing; checking next frame syncword");
- if (GST_BUFFER_SIZE (buf) >= (size + 16)) {
+ if (map.size >= (size + 16)) {
guint s2, r2, c2, n2, s3;
gboolean t;
- GST_MEMDUMP ("buf", GST_BUFFER_DATA (buf), size + 16);
- gst_byte_reader_init_from_buffer (&r, buf);
+ GST_MEMDUMP ("buf", map.data, size + 16);
+ gst_byte_reader_init (&r, map.data, map.size);
gst_byte_reader_skip_unchecked (&r, size);
if (!gst_dca_parse_parse_header (dcaparse, &r, &s2, &r2, &c2, NULL, NULL,
&n2, &s3, &t)) {
GST_DEBUG_OBJECT (dcaparse, "didn't find second syncword");
*skipsize = 4;
- return FALSE;
+ goto cleanup;
}
/* ok, got sync now, let's assume constant frame size */
gst_base_parse_set_min_frame_size (parse, size);
} else {
- /* FIXME: baseparse always seems to hand us buffers of min_frame_size
- * bytes, which is unhelpful here */
- GST_LOG_OBJECT (dcaparse, "next sync out of reach (%u < %u)",
- GST_BUFFER_SIZE (buf), size + 16);
- /* *skipsize = 0; */
- /* return FALSE; */
+ /* wait for some more data */
+ GST_LOG_OBJECT (dcaparse,
+ "next sync out of reach (%" G_GSIZE_FORMAT " < %u)", map.size,
+ size + 16);
+ goto cleanup;
}
}
- return TRUE;
-}
-
-static GstFlowReturn
-gst_dca_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
-{
- GstDcaParse *dcaparse = GST_DCA_PARSE (parse);
- GstBuffer *buf = frame->buffer;
- GstByteReader r = GST_BYTE_READER_INIT_FROM_BUFFER (buf);
- guint size, rate, chans, depth, block_size, num_blocks, samples_per_block;
- gint endianness;
- gboolean terminator;
-
- if (!gst_dca_parse_parse_header (dcaparse, &r, &size, &rate, &chans, &depth,
- &endianness, &num_blocks, &samples_per_block, &terminator))
- goto broken_header;
+ /* found frame */
+ ret = GST_FLOW_OK;
+ /* metadata handling */
block_size = num_blocks * samples_per_block;
if (G_UNLIKELY (dcaparse->rate != rate || dcaparse->channels != chans
"endianness", G_TYPE_INT, endianness, "depth", G_TYPE_INT, depth,
"block-size", G_TYPE_INT, block_size, "frame-size", G_TYPE_INT, size,
NULL);
- gst_buffer_set_caps (buf, caps);
gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
gst_caps_unref (caps);
gst_base_parse_set_frame_rate (parse, rate, block_size, 0, 0);
}
- return GST_FLOW_OK;
+cleanup:
+ gst_buffer_unmap (buf, &map);
-/* ERRORS */
-broken_header:
- {
- /* this really shouldn't ever happen */
- GST_ELEMENT_ERROR (parse, STREAM, DECODE, (NULL), (NULL));
- return GST_FLOW_ERROR;
+ if (ret == GST_FLOW_OK && size <= map.size) {
+ ret = gst_base_parse_finish_frame (parse, frame, size);
+ } else {
+ ret = GST_FLOW_OK;
}
+
+ return ret;
}
static GstCaps *
-gst_dca_parse_get_sink_caps (GstBaseParse * parse)
+gst_dca_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
{
GstCaps *peercaps;
GstCaps *res;
+ /* FIXME: handle filter caps */
+
peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
if (peercaps) {
guint i, n;
static gboolean gst_flac_parse_start (GstBaseParse * parse);
static gboolean gst_flac_parse_stop (GstBaseParse * parse);
-static gboolean gst_flac_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * framesize, gint * skipsize);
+static GstFlowReturn gst_flac_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
static GstFlowReturn gst_flac_parse_parse_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame);
+ GstBaseParseFrame * frame, gint size);
static GstFlowReturn gst_flac_parse_pre_push_frame (GstBaseParse * parse,
GstBaseParseFrame * frame);
static gboolean gst_flac_parse_convert (GstBaseParse * parse,
GstFormat src_format, gint64 src_value, GstFormat dest_format,
gint64 * dest_value);
-static GstCaps *gst_flac_parse_get_sink_caps (GstBaseParse * parse);
+static GstCaps *gst_flac_parse_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
-GST_BOILERPLATE (GstFlacParse, gst_flac_parse, GstBaseParse,
- GST_TYPE_BASE_PARSE);
-
-static void
-gst_flac_parse_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
-
- gst_element_class_set_details_simple (element_class, "FLAC audio parser",
- "Codec/Parser/Audio",
- "Parses audio with the FLAC lossless audio codec",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- GST_DEBUG_CATEGORY_INIT (flacparse_debug, "flacparse", 0,
- "Flac parser element");
-}
+#define gst_flac_parse_parent_class parent_class
+G_DEFINE_TYPE (GstFlacParse, gst_flac_parse, GST_TYPE_BASE_PARSE);
static void
gst_flac_parse_class_init (GstFlacParseClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseParseClass *baseparse_class = GST_BASE_PARSE_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (flacparse_debug, "flacparse", 0,
+ "Flac parser element");
+
gobject_class->finalize = gst_flac_parse_finalize;
gobject_class->set_property = gst_flac_parse_set_property;
gobject_class->get_property = gst_flac_parse_get_property;
baseparse_class->start = GST_DEBUG_FUNCPTR (gst_flac_parse_start);
baseparse_class->stop = GST_DEBUG_FUNCPTR (gst_flac_parse_stop);
- baseparse_class->check_valid_frame =
- GST_DEBUG_FUNCPTR (gst_flac_parse_check_valid_frame);
- baseparse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_flac_parse_parse_frame);
+ baseparse_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_flac_parse_handle_frame);
baseparse_class->pre_push_frame =
GST_DEBUG_FUNCPTR (gst_flac_parse_pre_push_frame);
baseparse_class->convert = GST_DEBUG_FUNCPTR (gst_flac_parse_convert);
baseparse_class->get_sink_caps =
GST_DEBUG_FUNCPTR (gst_flac_parse_get_sink_caps);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (element_class, "FLAC audio parser",
+ "Codec/Parser/Audio",
+ "Parses audio with the FLAC lossless audio codec",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
static void
-gst_flac_parse_init (GstFlacParse * flacparse, GstFlacParseClass * klass)
+gst_flac_parse_init (GstFlacParse * flacparse)
{
flacparse->check_frame_checksums = DEFAULT_CHECK_FRAME_CHECKSUMS;
}
GstBaseParseFrame * frame, guint * ret)
{
GstBuffer *buffer;
- const guint8 *data;
- guint max, size, remaining;
+ GstMapInfo map;
+ guint max, remaining;
guint i, search_start, search_end;
FrameHeaderCheckReturn header_ret;
guint16 block_size;
+ gboolean result = FALSE;
buffer = frame->buffer;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
- if (size < flacparse->min_framesize)
+ if (map.size < flacparse->min_framesize)
goto need_more;
header_ret =
- gst_flac_parse_frame_header_is_valid (flacparse, data, size, TRUE,
+ gst_flac_parse_frame_header_is_valid (flacparse, map.data, map.size, TRUE,
&block_size);
if (header_ret == FRAME_HEADER_INVALID) {
*ret = 0;
- return FALSE;
- } else if (header_ret == FRAME_HEADER_MORE_DATA) {
- goto need_more;
+ goto cleanup;
}
+ if (header_ret == FRAME_HEADER_MORE_DATA)
+ goto need_more;
/* mind unknown framesize */
search_start = MAX (2, flacparse->min_framesize);
if (flacparse->max_framesize)
- search_end = MIN (size, flacparse->max_framesize + 9 + 2);
+ search_end = MIN (map.size, flacparse->max_framesize + 9 + 2);
else
- search_end = size;
+ search_end = map.size;
search_end -= 2;
- remaining = size;
+ remaining = map.size;
for (i = search_start; i < search_end; i++, remaining--) {
- if ((GST_READ_UINT16_BE (data + i) & 0xfffe) == 0xfff8) {
+ if ((GST_READ_UINT16_BE (map.data + i) & 0xfffe) == 0xfff8) {
header_ret =
- gst_flac_parse_frame_header_is_valid (flacparse, data + i, remaining,
- FALSE, NULL);
+ gst_flac_parse_frame_header_is_valid (flacparse, map.data + i,
+ remaining, FALSE, NULL);
if (header_ret == FRAME_HEADER_VALID) {
if (flacparse->check_frame_checksums) {
- guint16 actual_crc = gst_flac_calculate_crc16 (data, i - 2);
- guint16 expected_crc = GST_READ_UINT16_BE (data + i - 2);
+ guint16 actual_crc = gst_flac_calculate_crc16 (map.data, i - 2);
+ guint16 expected_crc = GST_READ_UINT16_BE (map.data + i - 2);
if (actual_crc != expected_crc)
continue;
}
*ret = i;
flacparse->block_size = block_size;
- return TRUE;
+ result = TRUE;
+ goto cleanup;
} else if (header_ret == FRAME_HEADER_MORE_DATA) {
goto need_more;
}
/* For the last frame output everything to the end */
if (G_UNLIKELY (GST_BASE_PARSE_DRAINING (flacparse))) {
if (flacparse->check_frame_checksums) {
- guint16 actual_crc = gst_flac_calculate_crc16 (data, size - 2);
- guint16 expected_crc = GST_READ_UINT16_BE (data + size - 2);
+ guint16 actual_crc = gst_flac_calculate_crc16 (map.data, map.size - 2);
+ guint16 expected_crc = GST_READ_UINT16_BE (map.data + map.size - 2);
if (actual_crc == expected_crc) {
- *ret = size;
+ *ret = map.size;
flacparse->block_size = block_size;
- return TRUE;
+ result = TRUE;
+ goto cleanup;
}
} else {
- *ret = size;
+ *ret = map.size;
flacparse->block_size = block_size;
- return TRUE;
+ result = TRUE;
+ goto cleanup;
}
}
max = flacparse->max_framesize + 16;
if (max == 16)
max = 1 << 24;
- *ret = MIN (size + 4096, max);
- return FALSE;
+ *ret = MIN (map.size + 4096, max);
+ result = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buffer, &map);
+ return result;
}
-static gboolean
-gst_flac_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
+static GstFlowReturn
+gst_flac_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
{
GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
GstBuffer *buffer = frame->buffer;
- const guint8 *data = GST_BUFFER_DATA (buffer);
+ GstMapInfo map;
+ gboolean result = TRUE;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint framesize;
- if (G_UNLIKELY (GST_BUFFER_SIZE (buffer) < 4))
- return FALSE;
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ *skipsize = 1;
+
+ if (G_UNLIKELY (map.size < 4)) {
+ result = FALSE;
+ goto cleanup;
+ }
if (flacparse->state == GST_FLAC_PARSE_STATE_INIT) {
- if (memcmp (GST_BUFFER_DATA (buffer), "fLaC", 4) == 0) {
+ if (memcmp (map.data, "fLaC", 4) == 0) {
GST_DEBUG_OBJECT (flacparse, "fLaC marker found");
- *framesize = 4;
- return TRUE;
- } else if (data[0] == 0xff && (data[1] >> 2) == 0x3e) {
+ framesize = 4;
+ goto cleanup;
+ }
+ if (map.data[0] == 0xff && (map.data[1] >> 2) == 0x3e) {
GST_DEBUG_OBJECT (flacparse, "Found headerless FLAC");
/* Minimal size of a frame header */
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), 9);
flacparse->state = GST_FLAC_PARSE_STATE_GENERATE_HEADERS;
*skipsize = 0;
- return FALSE;
- } else {
- GST_DEBUG_OBJECT (flacparse, "fLaC marker not found");
- return FALSE;
+ result = FALSE;
+ goto cleanup;
}
- } else if (flacparse->state == GST_FLAC_PARSE_STATE_HEADERS) {
- guint size = 4 + ((data[1] << 16) | (data[2] << 8) | (data[3]));
+ GST_DEBUG_OBJECT (flacparse, "fLaC marker not found");
+ result = FALSE;
+ goto cleanup;
+ }
+
+ if (flacparse->state == GST_FLAC_PARSE_STATE_HEADERS) {
+ guint size = 4 + ((map.data[1] << 16) | (map.data[2] << 8) | (map.data[3]));
GST_DEBUG_OBJECT (flacparse, "Found metadata block of size %u", size);
- *framesize = size;
- return TRUE;
- } else {
- if ((GST_READ_UINT16_BE (data) & 0xfffe) == 0xfff8) {
- gboolean ret;
- guint next;
+ framesize = size;
+ goto cleanup;
+ }
- flacparse->offset = GST_BUFFER_OFFSET (buffer);
- flacparse->blocking_strategy = 0;
- flacparse->sample_number = 0;
-
- GST_DEBUG_OBJECT (flacparse, "Found sync code");
- ret = gst_flac_parse_frame_is_valid (flacparse, frame, &next);
- if (ret) {
- *framesize = next;
- return TRUE;
- } else {
- /* If we're at EOS and the frame was not valid, drop it! */
- if (G_UNLIKELY (GST_BASE_PARSE_DRAINING (flacparse))) {
- GST_WARNING_OBJECT (flacparse, "EOS");
- return FALSE;
- }
+ if ((GST_READ_UINT16_BE (map.data) & 0xfffe) == 0xfff8) {
+ gboolean ret;
+ guint next;
- if (next == 0) {
- } else if (next > GST_BUFFER_SIZE (buffer)) {
- GST_DEBUG_OBJECT (flacparse, "Requesting %u bytes", next);
- *skipsize = 0;
- gst_base_parse_set_min_frame_size (parse, next);
- return FALSE;
- } else {
- GST_ERROR_OBJECT (flacparse,
- "Giving up on invalid frame (%d bytes)",
- GST_BUFFER_SIZE (buffer));
- return FALSE;
- }
- }
- } else {
- GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buffer);
- gint off;
+ flacparse->offset = GST_BUFFER_OFFSET (buffer);
+ flacparse->blocking_strategy = 0;
+ flacparse->sample_number = 0;
- off =
- gst_byte_reader_masked_scan_uint32 (&reader, 0xfffc0000, 0xfff80000,
- 0, GST_BUFFER_SIZE (buffer));
+ GST_DEBUG_OBJECT (flacparse, "Found sync code");
+ ret = gst_flac_parse_frame_is_valid (flacparse, frame, &next);
+ if (ret) {
+ framesize = next;
+ goto cleanup;
+ } else {
+ /* If we're at EOS and the frame was not valid, drop it! */
+ if (G_UNLIKELY (GST_BASE_PARSE_DRAINING (flacparse))) {
+ GST_WARNING_OBJECT (flacparse, "EOS");
+ result = FALSE;
+ goto cleanup;
+ }
- if (off > 0) {
- GST_DEBUG_OBJECT (parse, "Possible sync at buffer offset %d", off);
- *skipsize = off;
- return FALSE;
+ if (next == 0) {
+ } else if (next > map.size) {
+ GST_DEBUG_OBJECT (flacparse, "Requesting %u bytes", next);
+ *skipsize = 0;
+ gst_base_parse_set_min_frame_size (parse, next);
+ result = FALSE;
+ goto cleanup;
} else {
- GST_DEBUG_OBJECT (flacparse, "Sync code not found");
- *skipsize = GST_BUFFER_SIZE (buffer) - 3;
- return FALSE;
+ GST_ERROR_OBJECT (flacparse,
+ "Giving up on invalid frame (%" G_GSIZE_FORMAT " bytes)", map.size);
+ result = FALSE;
+ goto cleanup;
}
}
+ } else {
+ GstByteReader reader;
+ gint off;
+
+ gst_byte_reader_init (&reader, map.data, map.size);
+ off =
+ gst_byte_reader_masked_scan_uint32 (&reader, 0xfffc0000, 0xfff80000,
+ 0, map.size);
+
+ if (off > 0) {
+ GST_DEBUG_OBJECT (parse, "Possible sync at buffer offset %d", off);
+ *skipsize = off;
+ result = FALSE;
+ goto cleanup;
+ } else {
+ GST_DEBUG_OBJECT (flacparse, "Sync code not found");
+ *skipsize = map.size - 3;
+ result = FALSE;
+ goto cleanup;
+ }
}
- return FALSE;
+ result = FALSE;
+
+cleanup:
+ gst_buffer_unmap (buffer, &map);
+
+ if (result)
+ *skipsize = 0;
+
+ if (result && framesize <= map.size) {
+ ret = gst_flac_parse_parse_frame (parse, frame, framesize);
+ if (ret == GST_BASE_PARSE_FLOW_DROPPED) {
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP;
+ ret = GST_FLOW_OK;
+ }
+ if (ret == GST_FLOW_OK)
+ ret = gst_base_parse_finish_frame (parse, frame, framesize);
+ }
+
+ return ret;
}
static gboolean
gst_flac_parse_handle_streaminfo (GstFlacParse * flacparse, GstBuffer * buffer)
{
- GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+ GstBitReader reader;
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ gst_bit_reader_init (&reader, map.data, map.size);
- if (GST_BUFFER_SIZE (buffer) != 4 + 34) {
- GST_ERROR_OBJECT (flacparse, "Invalid metablock size for STREAMINFO: %u",
- GST_BUFFER_SIZE (buffer));
- return FALSE;
+ if (map.size != 4 + 34) {
+ GST_ERROR_OBJECT (flacparse,
+ "Invalid metablock size for STREAMINFO: %" G_GSIZE_FORMAT "", map.size);
+ goto failure;
}
/* Skip metadata block header */
goto error;
if (flacparse->samplerate == 0) {
GST_ERROR_OBJECT (flacparse, "Invalid sample rate 0");
- return FALSE;
+ goto failure;
}
if (!gst_bit_reader_get_bits_uint8 (&reader, &flacparse->channels, 3))
if (flacparse->channels > 8) {
GST_ERROR_OBJECT (flacparse, "Invalid number of channels %u",
flacparse->channels);
- return FALSE;
+ goto failure;
}
if (!gst_bit_reader_get_bits_uint8 (&reader, &flacparse->bps, 5))
GST_FORMAT_DEFAULT, flacparse->total_samples, 0);
}
+ gst_buffer_unmap (buffer, &map);
+
GST_DEBUG_OBJECT (flacparse, "STREAMINFO:\n"
"\tmin/max blocksize: %u/%u,\n"
"\tmin/max framesize: %u/%u,\n"
error:
GST_ERROR_OBJECT (flacparse, "Failed to read data");
+failure:
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
gst_flac_parse_handle_vorbiscomment (GstFlacParse * flacparse,
GstBuffer * buffer)
{
- flacparse->tags = gst_tag_list_from_vorbiscomment_buffer (buffer,
- GST_BUFFER_DATA (buffer), 4, NULL);
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ flacparse->tags =
+ gst_tag_list_from_vorbiscomment (map.data, map.size, map.data, 4, NULL);
+ gst_buffer_unmap (buffer, &map);
if (flacparse->tags == NULL) {
GST_ERROR_OBJECT (flacparse, "Invalid vorbiscomment block");
static gboolean
gst_flac_parse_handle_picture (GstFlacParse * flacparse, GstBuffer * buffer)
{
- GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buffer);
- const guint8 *data = GST_BUFFER_DATA (buffer);
+ GstByteReader reader;
+ GstMapInfo map;
guint32 img_len = 0, img_type = 0;
guint32 img_mimetype_len = 0, img_description_len = 0;
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ gst_byte_reader_init (&reader, map.data, map.size);
+
if (!gst_byte_reader_skip (&reader, 4))
goto error;
goto error;
if (!flacparse->tags)
- flacparse->tags = gst_tag_list_new ();
+ flacparse->tags = gst_tag_list_new_empty ();
gst_tag_list_add_id3_image (flacparse->tags,
- data + gst_byte_reader_get_pos (&reader), img_len, img_type);
+ map.data + gst_byte_reader_get_pos (&reader), img_len, img_type);
if (gst_tag_list_is_empty (flacparse->tags)) {
gst_tag_list_free (flacparse->tags);
flacparse->tags = NULL;
}
+ gst_buffer_unmap (buffer, &map);
return TRUE;
error:
GST_ERROR_OBJECT (flacparse, "Error reading data");
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
{
GstByteReader br;
gint64 offset = 0, samples = 0;
+ GstMapInfo map;
GST_DEBUG_OBJECT (flacparse,
"parsing seektable; base offset %" G_GINT64_FORMAT, boffset);
if (boffset <= 0)
- goto done;
+ goto exit;
+
+ gst_buffer_map (flacparse->seektable, &map, GST_MAP_READ);
+ gst_byte_reader_init (&br, map.data, map.size);
- gst_byte_reader_init_from_buffer (&br, flacparse->seektable);
/* skip header */
if (!gst_byte_reader_skip (&br, 4))
goto done;
}
done:
+ gst_buffer_unmap (flacparse->seektable, &map);
+exit:
gst_buffer_unref (flacparse->seektable);
flacparse->seektable = NULL;
}
/* copy buffer to avoid problems with circular refcounts */
buf = gst_buffer_copy (buf);
/* again, for good measure */
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
gst_value_set_buffer (&value, buf);
gst_buffer_unref (buf);
gst_value_array_append_value (array_val, &value);
for (l = flacparse->headers; l; l = l->next) {
GstBuffer *header = l->data;
- const guint8 *data = GST_BUFFER_DATA (header);
- guint size = GST_BUFFER_SIZE (header);
+ GstMapInfo map;
+
+ gst_buffer_map (header, &map, GST_MAP_READ);
- GST_BUFFER_FLAG_SET (header, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (header, GST_BUFFER_FLAG_HEADER);
- if (size == 4 && memcmp (data, "fLaC", 4) == 0) {
+ if (map.size == 4 && memcmp (map.data, "fLaC", 4) == 0) {
marker = header;
- } else if (size > 1 && (data[0] & 0x7f) == 0) {
+ } else if (map.size > 1 && (map.data[0] & 0x7f) == 0) {
streaminfo = header;
- } else if (size > 1 && (data[0] & 0x7f) == 4) {
+ } else if (map.size > 1 && (map.data[0] & 0x7f) == 4) {
vorbiscomment = header;
}
+
+ gst_buffer_unmap (header, &map);
}
if (marker == NULL || streaminfo == NULL || vorbiscomment == NULL) {
{
GstBuffer *buf;
guint16 num;
+ GstMapInfo sinfomap, writemap;
+
+ gst_buffer_map (streaminfo, &sinfomap, GST_MAP_READ);
/* minus one for the marker that is merged with streaminfo here */
num = g_list_length (flacparse->headers) - 1;
- buf = gst_buffer_new_and_alloc (13 + GST_BUFFER_SIZE (streaminfo));
- GST_BUFFER_DATA (buf)[0] = 0x7f;
- memcpy (GST_BUFFER_DATA (buf) + 1, "FLAC", 4);
- GST_BUFFER_DATA (buf)[5] = 0x01; /* mapping version major */
- GST_BUFFER_DATA (buf)[6] = 0x00; /* mapping version minor */
- GST_BUFFER_DATA (buf)[7] = (num & 0xFF00) >> 8;
- GST_BUFFER_DATA (buf)[8] = (num & 0x00FF) >> 0;
- memcpy (GST_BUFFER_DATA (buf) + 9, "fLaC", 4);
- memcpy (GST_BUFFER_DATA (buf) + 13, GST_BUFFER_DATA (streaminfo),
- GST_BUFFER_SIZE (streaminfo));
+ buf = gst_buffer_new_and_alloc (13 + sinfomap.size);
+ gst_buffer_map (buf, &writemap, GST_MAP_WRITE);
+
+ writemap.data[0] = 0x7f;
+ memcpy (writemap.data + 1, "FLAC", 4);
+ writemap.data[5] = 0x01; /* mapping version major */
+ writemap.data[6] = 0x00; /* mapping version minor */
+ writemap.data[7] = (num & 0xFF00) >> 8;
+ writemap.data[8] = (num & 0x00FF) >> 0;
+ memcpy (writemap.data + 9, "fLaC", 4);
+ memcpy (writemap.data + 13, sinfomap.data, sinfomap.size);
_value_array_append_buffer (&array, buf);
+
+ gst_buffer_unmap (streaminfo, &sinfomap);
+ gst_buffer_unmap (buf, &writemap);
gst_buffer_unref (buf);
}
flacparse->headers =
g_list_delete_link (flacparse->headers, flacparse->headers);
- buf = gst_buffer_make_metadata_writable (buf);
- gst_buffer_set_caps (buf,
- GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (GST_BASE_PARSE (flacparse))));
+ buf = gst_buffer_make_writable (buf);
/* init, set and give away frame */
gst_base_parse_frame_init (&frame);
res = FALSE;
break;
}
+ gst_base_parse_frame_free (&frame);
}
g_list_foreach (flacparse->headers, (GFunc) gst_mini_object_unref, NULL);
g_list_free (flacparse->headers);
gst_flac_parse_generate_headers (GstFlacParse * flacparse)
{
GstBuffer *marker, *streaminfo, *vorbiscomment;
- guint8 *data;
+ GstMapInfo map;
marker = gst_buffer_new_and_alloc (4);
- memcpy (GST_BUFFER_DATA (marker), "fLaC", 4);
+ gst_buffer_map (marker, &map, GST_MAP_WRITE);
+ memcpy (map.data, "fLaC", 4);
+ gst_buffer_unmap (marker, &map);
GST_BUFFER_TIMESTAMP (marker) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (marker) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET (marker) = 0;
flacparse->headers = g_list_append (flacparse->headers, marker);
streaminfo = gst_buffer_new_and_alloc (4 + 34);
- data = GST_BUFFER_DATA (streaminfo);
- memset (data, 0, 4 + 34);
+ gst_buffer_map (streaminfo, &map, GST_MAP_WRITE);
+ memset (map.data, 0, 4 + 34);
/* metadata block header */
- data[0] = 0x00; /* is_last = 0; type = 0; */
- data[1] = 0x00; /* length = 34; */
- data[2] = 0x00;
- data[3] = 0x22;
+ map.data[0] = 0x00; /* is_last = 0; type = 0; */
+ map.data[1] = 0x00; /* length = 34; */
+ map.data[2] = 0x00;
+ map.data[3] = 0x22;
/* streaminfo */
- data[4] = (flacparse->block_size >> 8) & 0xff; /* min blocksize = blocksize; */
- data[5] = (flacparse->block_size) & 0xff;
- data[6] = (flacparse->block_size >> 8) & 0xff; /* max blocksize = blocksize; */
- data[7] = (flacparse->block_size) & 0xff;
+ map.data[4] = (flacparse->block_size >> 8) & 0xff; /* min blocksize = blocksize; */
+ map.data[5] = (flacparse->block_size) & 0xff;
+ map.data[6] = (flacparse->block_size >> 8) & 0xff; /* max blocksize = blocksize; */
+ map.data[7] = (flacparse->block_size) & 0xff;
- data[8] = 0x00; /* min framesize = 0; */
- data[9] = 0x00;
- data[10] = 0x00;
- data[11] = 0x00; /* max framesize = 0; */
- data[12] = 0x00;
- data[13] = 0x00;
+ map.data[8] = 0x00; /* min framesize = 0; */
+ map.data[9] = 0x00;
+ map.data[10] = 0x00;
+ map.data[11] = 0x00; /* max framesize = 0; */
+ map.data[12] = 0x00;
+ map.data[13] = 0x00;
- data[14] = (flacparse->samplerate >> 12) & 0xff;
- data[15] = (flacparse->samplerate >> 4) & 0xff;
- data[16] = (flacparse->samplerate >> 0) & 0xf0;
+ map.data[14] = (flacparse->samplerate >> 12) & 0xff;
+ map.data[15] = (flacparse->samplerate >> 4) & 0xff;
+ map.data[16] = (flacparse->samplerate >> 0) & 0xf0;
- data[16] |= (flacparse->channels - 1) << 1;
+ map.data[16] |= (flacparse->channels - 1) << 1;
- data[16] |= ((flacparse->bps - 1) >> 4) & 0x01;
- data[17] = (((flacparse->bps - 1)) & 0x0f) << 4;
+ map.data[16] |= ((flacparse->bps - 1) >> 4) & 0x01;
+ map.data[17] = (((flacparse->bps - 1)) & 0x0f) << 4;
{
gint64 duration;
- GstFormat fmt = GST_FORMAT_TIME;
- if (gst_pad_query_peer_duration (GST_BASE_PARSE_SINK_PAD (GST_BASE_PARSE
- (flacparse)), &fmt, &duration) && fmt == GST_FORMAT_TIME) {
+ if (gst_pad_peer_query_duration (GST_BASE_PARSE_SINK_PAD (flacparse),
+ GST_FORMAT_TIME, &duration)) {
duration = GST_CLOCK_TIME_TO_FRAMES (duration, flacparse->samplerate);
- data[17] |= (duration >> 32) & 0xff;
- data[18] |= (duration >> 24) & 0xff;
- data[19] |= (duration >> 16) & 0xff;
- data[20] |= (duration >> 8) & 0xff;
- data[21] |= (duration >> 0) & 0xff;
+ map.data[17] |= (duration >> 32) & 0xff;
+ map.data[18] |= (duration >> 24) & 0xff;
+ map.data[19] |= (duration >> 16) & 0xff;
+ map.data[20] |= (duration >> 8) & 0xff;
+ map.data[21] |= (duration >> 0) & 0xff;
}
}
/* MD5 = 0; */
+ gst_buffer_unmap (streaminfo, &map);
GST_BUFFER_TIMESTAMP (streaminfo) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (streaminfo) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET (streaminfo) = 0;
/* empty vorbiscomment */
{
- GstTagList *taglist = gst_tag_list_new ();
+ GstTagList *taglist = gst_tag_list_new_empty ();
guchar header[4];
guint size;
sizeof (header), NULL);
gst_tag_list_free (taglist);
+ gst_buffer_map (vorbiscomment, &map, GST_MAP_WRITE);
+
/* Get rid of framing bit */
- if (GST_BUFFER_DATA (vorbiscomment)[GST_BUFFER_SIZE (vorbiscomment) -
- 1] == 1) {
+ if (map.data[map.size - 1] == 1) {
GstBuffer *sub;
sub =
- gst_buffer_create_sub (vorbiscomment, 0,
- GST_BUFFER_SIZE (vorbiscomment) - 1);
+ gst_buffer_copy_region (vorbiscomment, GST_BUFFER_COPY_ALL, 0,
+ map.size - 1);
+ gst_buffer_unmap (vorbiscomment, &map);
gst_buffer_unref (vorbiscomment);
vorbiscomment = sub;
+ gst_buffer_map (vorbiscomment, &map, GST_MAP_WRITE);
}
- size = GST_BUFFER_SIZE (vorbiscomment) - 4;
- GST_BUFFER_DATA (vorbiscomment)[1] = ((size & 0xFF0000) >> 16);
- GST_BUFFER_DATA (vorbiscomment)[2] = ((size & 0x00FF00) >> 8);
- GST_BUFFER_DATA (vorbiscomment)[3] = (size & 0x0000FF);
+ size = map.size - 4;
+ map.data[1] = ((size & 0xFF0000) >> 16);
+ map.data[2] = ((size & 0x00FF00) >> 8);
+ map.data[3] = (size & 0x0000FF);
+ gst_buffer_unmap (vorbiscomment, &map);
GST_BUFFER_TIMESTAMP (vorbiscomment) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (vorbiscomment) = GST_CLOCK_TIME_NONE;
}
static GstFlowReturn
-gst_flac_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
+gst_flac_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame,
+ gint size)
{
GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
- GstBuffer *buffer = frame->buffer;
- const guint8 *data = GST_BUFFER_DATA (buffer);
+ GstBuffer *buffer = frame->buffer, *sbuffer;
+ GstMapInfo map;
+ GstFlowReturn res = GST_FLOW_ERROR;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
if (flacparse->state == GST_FLAC_PARSE_STATE_INIT) {
- GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
- GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
- GST_BUFFER_OFFSET (buffer) = 0;
- GST_BUFFER_OFFSET_END (buffer) = 0;
+ sbuffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, 0, size);
+ GST_BUFFER_TIMESTAMP (sbuffer) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_DURATION (sbuffer) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_OFFSET (sbuffer) = 0;
+ GST_BUFFER_OFFSET_END (sbuffer) = 0;
/* 32 bits metadata block */
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), 4);
flacparse->state = GST_FLAC_PARSE_STATE_HEADERS;
- flacparse->headers =
- g_list_append (flacparse->headers, gst_buffer_ref (buffer));
+ flacparse->headers = g_list_append (flacparse->headers, sbuffer);
- return GST_BASE_PARSE_FLOW_DROPPED;
+ res = GST_BASE_PARSE_FLOW_DROPPED;
} else if (flacparse->state == GST_FLAC_PARSE_STATE_HEADERS) {
- gboolean is_last = ((data[0] & 0x80) == 0x80);
- guint type = (data[0] & 0x7F);
+ gboolean is_last = ((map.data[0] & 0x80) == 0x80);
+ guint type = (map.data[0] & 0x7F);
if (type == 127) {
GST_WARNING_OBJECT (flacparse, "Invalid metadata block type");
- return GST_BASE_PARSE_FLOW_DROPPED;
+ res = GST_BASE_PARSE_FLOW_DROPPED;
+ goto cleanup;
}
GST_DEBUG_OBJECT (flacparse, "Handling metadata block of type %u", type);
+ sbuffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, 0, size);
+
switch (type) {
case 0: /* STREAMINFO */
- if (!gst_flac_parse_handle_streaminfo (flacparse, buffer))
- return GST_FLOW_ERROR;
+ if (!gst_flac_parse_handle_streaminfo (flacparse, sbuffer))
+ goto cleanup;
break;
case 3: /* SEEKTABLE */
- if (!gst_flac_parse_handle_seektable (flacparse, buffer))
- return GST_FLOW_ERROR;
+ if (!gst_flac_parse_handle_seektable (flacparse, sbuffer))
+ goto cleanup;
break;
case 4: /* VORBIS_COMMENT */
- if (!gst_flac_parse_handle_vorbiscomment (flacparse, buffer))
- return GST_FLOW_ERROR;
+ if (!gst_flac_parse_handle_vorbiscomment (flacparse, sbuffer))
+ goto cleanup;
break;
case 6: /* PICTURE */
- if (!gst_flac_parse_handle_picture (flacparse, buffer))
- return GST_FLOW_ERROR;
+ if (!gst_flac_parse_handle_picture (flacparse, sbuffer))
+ goto cleanup;
break;
case 1: /* PADDING */
case 2: /* APPLICATION */
break;
}
- GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
- GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
- GST_BUFFER_OFFSET (buffer) = 0;
- GST_BUFFER_OFFSET_END (buffer) = 0;
+ GST_BUFFER_TIMESTAMP (sbuffer) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_DURATION (sbuffer) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_OFFSET (sbuffer) = 0;
+ GST_BUFFER_OFFSET_END (sbuffer) = 0;
- flacparse->headers =
- g_list_append (flacparse->headers, gst_buffer_ref (buffer));
+ flacparse->headers = g_list_append (flacparse->headers, sbuffer);
if (is_last) {
if (!gst_flac_parse_handle_headers (flacparse))
- return GST_FLOW_ERROR;
+ goto cleanup;
/* Minimal size of a frame header */
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), MAX (9,
}
/* DROPPED because we pushed already or will push all headers manually */
- return GST_BASE_PARSE_FLOW_DROPPED;
+ res = GST_BASE_PARSE_FLOW_DROPPED;
} else {
if (flacparse->offset != GST_BUFFER_OFFSET (buffer)) {
FrameHeaderCheckReturn ret;
flacparse->offset = GST_BUFFER_OFFSET (buffer);
ret =
gst_flac_parse_frame_header_is_valid (flacparse,
- GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), TRUE, NULL);
+ map.data, map.size, TRUE, NULL);
if (ret != FRAME_HEADER_VALID) {
GST_ERROR_OBJECT (flacparse,
"Baseclass didn't provide a complete frame");
- return GST_FLOW_ERROR;
+ goto cleanup;
}
}
if (flacparse->block_size == 0) {
GST_ERROR_OBJECT (flacparse, "Unparsed frame");
- return GST_FLOW_ERROR;
+ goto cleanup;
}
if (flacparse->seektable)
"Generating headers for variable blocksize streams not supported");
if (!gst_flac_parse_handle_headers (flacparse))
- return GST_FLOW_ERROR;
+ goto cleanup;
} else {
GST_DEBUG_OBJECT (flacparse, "Generating headers");
if (!gst_flac_parse_generate_headers (flacparse))
- return GST_FLOW_ERROR;
+ goto cleanup;
if (!gst_flac_parse_handle_headers (flacparse))
- return GST_FLOW_ERROR;
+ goto cleanup;
}
flacparse->state = GST_FLAC_PARSE_STATE_DATA;
}
flacparse->offset = -1;
flacparse->blocking_strategy = 0;
flacparse->sample_number = 0;
- return GST_FLOW_OK;
+ res = GST_FLOW_OK;
}
+
+cleanup:
+ gst_buffer_unmap (buffer, &map);
+
+ return res;
}
static GstFlowReturn
/* Push tags */
if (flacparse->tags) {
- gst_element_found_tags (GST_ELEMENT (flacparse), flacparse->tags);
+ gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (flacparse),
+ gst_event_new_tag (flacparse->tags));
flacparse->tags = NULL;
}
}
static GstCaps *
-gst_flac_parse_get_sink_caps (GstBaseParse * parse)
+gst_flac_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
{
GstCaps *peercaps;
GstCaps *res;
+ /* FIXME: handle filter caps */
+
peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
if (peercaps) {
guint i, n;
static gboolean gst_mpeg_audio_parse_start (GstBaseParse * parse);
static gboolean gst_mpeg_audio_parse_stop (GstBaseParse * parse);
-static gboolean gst_mpeg_audio_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * size, gint * skipsize);
-static GstFlowReturn gst_mpeg_audio_parse_parse_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame);
+static GstFlowReturn gst_mpeg_audio_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
static GstFlowReturn gst_mpeg_audio_parse_pre_push_frame (GstBaseParse * parse,
GstBaseParseFrame * frame);
static gboolean gst_mpeg_audio_parse_convert (GstBaseParse * parse,
GstFormat src_format, gint64 src_value,
GstFormat dest_format, gint64 * dest_value);
-static GstCaps *gst_mpeg_audio_parse_get_sink_caps (GstBaseParse * parse);
+static GstCaps *gst_mpeg_audio_parse_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
+
+static void gst_mpeg_audio_parse_handle_first_frame (GstMpegAudioParse *
+ mp3parse, GstBuffer * buf);
-GST_BOILERPLATE (GstMpegAudioParse, gst_mpeg_audio_parse, GstBaseParse,
- GST_TYPE_BASE_PARSE);
+#define gst_mpeg_audio_parse_parent_class parent_class
+G_DEFINE_TYPE (GstMpegAudioParse, gst_mpeg_audio_parse, GST_TYPE_BASE_PARSE);
#define GST_TYPE_MPEG_AUDIO_CHANNEL_MODE \
(gst_mpeg_audio_channel_mode_get_type())
}
static void
-gst_mpeg_audio_parse_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
-
- gst_element_class_set_details_simple (element_class, "MPEG1 Audio Parser",
- "Codec/Parser/Audio",
- "Parses and frames mpeg1 audio streams (levels 1-3), provides seek",
- "Jan Schmidt <thaytan@mad.scientist.com>,"
- "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
-}
-
-static void
gst_mpeg_audio_parse_class_init (GstMpegAudioParseClass * klass)
{
GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GObjectClass *object_class = G_OBJECT_CLASS (klass);
GST_DEBUG_CATEGORY_INIT (mpeg_audio_parse_debug, "mpegaudioparse", 0,
parse_class->start = GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_stop);
- parse_class->check_valid_frame =
- GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_check_valid_frame);
- parse_class->parse_frame =
- GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_parse_frame);
+ parse_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_handle_frame);
parse_class->pre_push_frame =
GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_pre_push_frame);
parse_class->convert = GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_convert);
"channel mode", "MPEG audio channel mode", NULL);
g_type_class_ref (GST_TYPE_MPEG_AUDIO_CHANNEL_MODE);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (element_class, "MPEG1 Audio Parser",
+ "Codec/Parser/Audio",
+ "Parses and frames mpeg1 audio streams (levels 1-3), provides seek",
+ "Jan Schmidt <thaytan@mad.scientist.com>,"
+ "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
}
static void
}
static void
-gst_mpeg_audio_parse_init (GstMpegAudioParse * mp3parse,
- GstMpegAudioParseClass * klass)
+gst_mpeg_audio_parse_init (GstMpegAudioParse * mp3parse)
{
gst_mpeg_audio_parse_reset (mp3parse);
}
guint32 header, int bpf, gboolean at_eos, gint * valid)
{
guint32 next_header;
- const guint8 *data;
- guint available;
+ GstMapInfo map;
+ gboolean res = TRUE;
int frames_found = 1;
int offset = bpf;
- available = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
while (frames_found < MIN_RESYNC_FRAMES) {
/* Check if we have enough data for all these frames, plus the next
frame header. */
- if (available < offset + 4) {
+ if (map.size < offset + 4) {
if (at_eos) {
/* Running out of data at EOS is fine; just accept it */
*valid = TRUE;
- return TRUE;
+ goto cleanup;
} else {
*valid = offset + 4;
- return FALSE;
+ res = FALSE;
+ goto cleanup;
}
}
- next_header = GST_READ_UINT32_BE (data + offset);
+ next_header = GST_READ_UINT32_BE (map.data + offset);
GST_DEBUG_OBJECT (mp3parse, "At %d: header=%08X, header2=%08X, bpf=%d",
offset, (unsigned int) header, (unsigned int) next_header, bpf);
(guint) header, (guint) header & HDRMASK, (guint) next_header,
(guint) next_header & HDRMASK, bpf);
*valid = FALSE;
- return TRUE;
+ goto cleanup;
} else if ((((next_header >> 12) & 0xf) == 0) ||
(((next_header >> 12) & 0xf) == 0xf)) {
/* The essential parts were the same, but the bitrate held an
invalid value - also reject */
GST_DEBUG_OBJECT (mp3parse, "next header invalid (bitrate)");
*valid = FALSE;
- return TRUE;
+ goto cleanup;
}
bpf = mp3_type_frame_length_from_header (mp3parse, next_header,
}
*valid = TRUE;
- return TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+ return res;
}
static gboolean
return TRUE;
}
-static gboolean
-gst_mpeg_audio_parse_check_valid_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
+static GstFlowReturn
+gst_mpeg_audio_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
{
GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
GstBuffer *buf = frame->buffer;
- GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buf);
+ GstByteReader reader;
gint off, bpf;
gboolean lost_sync, draining, valid, caps_change;
guint32 header;
guint bitrate, layer, rate, channels, version, mode, crc;
+ GstMapInfo map;
+ gboolean res = FALSE;
- if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < 6))
- return FALSE;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (G_UNLIKELY (map.size < 6)) {
+ *skipsize = 1;
+ goto cleanup;
+ }
+
+ gst_byte_reader_init (&reader, map.data, map.size);
off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffe00000, 0xffe00000,
- 0, GST_BUFFER_SIZE (buf));
+ 0, map.size);
GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);
/* didn't find anything that looks like a sync word, skip */
if (off < 0) {
- *skipsize = GST_BUFFER_SIZE (buf) - 3;
- return FALSE;
+ *skipsize = map.size - 3;
+ goto cleanup;
}
/* possible frame header, but not at offset 0? skip bytes before sync */
if (off > 0) {
*skipsize = off;
- return FALSE;
+ goto cleanup;
}
/* make sure the values in the frame header look sane */
- header = GST_READ_UINT32_BE (GST_BUFFER_DATA (buf));
+ header = GST_READ_UINT32_BE (map.data);
if (!gst_mpeg_audio_parse_head_check (mp3parse, header)) {
*skipsize = 1;
- return FALSE;
+ goto cleanup;
}
GST_LOG_OBJECT (parse, "got frame");
/* not enough data */
gst_base_parse_set_min_frame_size (parse, valid);
*skipsize = 0;
- return FALSE;
+ goto cleanup;
} else {
if (!valid) {
*skipsize = off + 2;
- return FALSE;
+ goto cleanup;
}
}
} else if (draining && lost_sync && caps_change && mp3parse->rate > 0) {
/* avoid caps jitter that we can't be sure of */
*skipsize = off + 2;
- return FALSE;
+ goto cleanup;
}
/* restore default minimum */
gst_base_parse_set_min_frame_size (parse, MIN_FRAME_SIZE);
- *framesize = bpf;
- return TRUE;
+ res = TRUE;
+
+ /* metadata handling */
+ if (G_UNLIKELY (caps_change)) {
+ GstCaps *caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1,
+ "mpegaudioversion", G_TYPE_INT, version,
+ "layer", G_TYPE_INT, layer,
+ "rate", G_TYPE_INT, rate,
+ "channels", G_TYPE_INT, channels, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+ gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
+ gst_caps_unref (caps);
+
+ mp3parse->rate = rate;
+ mp3parse->channels = channels;
+ mp3parse->layer = layer;
+ mp3parse->version = version;
+
+ /* see http://www.codeproject.com/audio/MPEGAudioInfo.asp */
+ if (mp3parse->layer == 1)
+ mp3parse->spf = 384;
+ else if (mp3parse->layer == 2)
+ mp3parse->spf = 1152;
+ else if (mp3parse->version == 1) {
+ mp3parse->spf = 1152;
+ } else {
+ /* MPEG-2 or "2.5" */
+ mp3parse->spf = 576;
+ }
+
+ /* lead_in:
+ * We start pushing 9 frames earlier (29 frames for MPEG2) than
+ * segment start to be able to decode the first frame we want.
+ * 9 (29) frames are the theoretical maximum of frames that contain
+ * data for the current frame (bit reservoir).
+ *
+ * lead_out:
+ * Some mp3 streams have an offset in the timestamps, for which we have to
+ * push the frame *after* the end position in order for the decoder to be
+ * able to decode everything up until the segment.stop position. */
+ gst_base_parse_set_frame_rate (parse, mp3parse->rate, mp3parse->spf,
+ (version == 1) ? 10 : 30, 2);
+ }
+
+ mp3parse->hdr_bitrate = bitrate;
+
+ /* For first frame; check for seek tables and output a codec tag */
+ gst_mpeg_audio_parse_handle_first_frame (mp3parse, buf);
+
+ /* store some frame info for later processing */
+ mp3parse->last_crc = crc;
+ mp3parse->last_mode = mode;
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+
+ if (res && bpf <= map.size) {
+ return gst_base_parse_finish_frame (parse, frame, bpf);
+ }
+
+ return GST_FLOW_OK;
}
static void
gint offset_xing, offset_vbri;
guint64 avail;
gint64 upstream_total_bytes = 0;
- GstFormat fmt = GST_FORMAT_BYTES;
guint32 read_id_xing = 0, read_id_vbri = 0;
- const guint8 *data;
+ GstMapInfo map;
+ guint8 *data;
guint bitrate;
if (mp3parse->sent_codec_tag)
offset_vbri += 4;
/* Check if we have enough data to read the Xing header */
- avail = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ avail = map.size;
if (avail >= offset_xing + 4) {
read_id_xing = GST_READ_UINT32_BE (data + offset_xing);
}
/* obtain real upstream total bytes */
- fmt = GST_FORMAT_BYTES;
- if (!gst_pad_query_peer_duration (GST_BASE_PARSE_SINK_PAD (GST_BASE_PARSE
- (mp3parse)), &fmt, &upstream_total_bytes))
+ if (!gst_pad_peer_query_duration (GST_BASE_PARSE_SINK_PAD (mp3parse),
+ GST_FORMAT_BYTES, &upstream_total_bytes))
upstream_total_bytes = 0;
if (read_id_xing == xing_id || read_id_xing == info_id) {
if (avail < bytes_needed) {
GST_DEBUG_OBJECT (mp3parse,
"Not enough data to read Xing header (need %d)", bytes_needed);
- return;
+ goto cleanup;
}
GST_DEBUG_OBJECT (mp3parse, "Reading Xing header");
if (avail < offset_vbri + 26) {
GST_DEBUG_OBJECT (mp3parse,
"Not enough data to read VBRI header (need %d)", offset_vbri + 26);
- return;
+ goto cleanup;
}
GST_DEBUG_OBJECT (mp3parse, "Reading VBRI header");
if (GST_READ_UINT16_BE (data) != 0x0001) {
GST_WARNING_OBJECT (mp3parse,
"Unsupported VBRI version 0x%x", GST_READ_UINT16_BE (data));
- return;
+ goto cleanup;
}
data += 2;
GST_DEBUG_OBJECT (mp3parse,
"Not enough data to read VBRI header (need %d)",
offset_vbri + 26 + nseek_points * seek_bytes);
- return;
+ goto cleanup;
}
- data = GST_BUFFER_DATA (buf);
+ data = map.data;
data += offset_vbri + 26;
/* VBRI seek table: frame/seek_frames -> byte */
bitrate = 0;
gst_base_parse_set_average_bitrate (GST_BASE_PARSE (mp3parse), bitrate);
-}
-static GstFlowReturn
-gst_mpeg_audio_parse_parse_frame (GstBaseParse * parse,
- GstBaseParseFrame * frame)
-{
- GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
- GstBuffer *buf = frame->buffer;
- guint bitrate, layer, rate, channels, version, mode, crc;
-
- g_return_val_if_fail (GST_BUFFER_SIZE (buf) >= 4, GST_FLOW_ERROR);
-
- if (!mp3_type_frame_length_from_header (mp3parse,
- GST_READ_UINT32_BE (GST_BUFFER_DATA (buf)),
- &version, &layer, &channels, &bitrate, &rate, &mode, &crc))
- goto broken_header;
-
- if (G_UNLIKELY (channels != mp3parse->channels || rate != mp3parse->rate ||
- layer != mp3parse->layer || version != mp3parse->version)) {
- GstCaps *caps = gst_caps_new_simple ("audio/mpeg",
- "mpegversion", G_TYPE_INT, 1,
- "mpegaudioversion", G_TYPE_INT, version,
- "layer", G_TYPE_INT, layer,
- "rate", G_TYPE_INT, rate,
- "channels", G_TYPE_INT, channels, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
- gst_buffer_set_caps (buf, caps);
- gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
- gst_caps_unref (caps);
-
- mp3parse->rate = rate;
- mp3parse->channels = channels;
- mp3parse->layer = layer;
- mp3parse->version = version;
-
- /* see http://www.codeproject.com/audio/MPEGAudioInfo.asp */
- if (mp3parse->layer == 1)
- mp3parse->spf = 384;
- else if (mp3parse->layer == 2)
- mp3parse->spf = 1152;
- else if (mp3parse->version == 1) {
- mp3parse->spf = 1152;
- } else {
- /* MPEG-2 or "2.5" */
- mp3parse->spf = 576;
- }
-
- /* lead_in:
- * We start pushing 9 frames earlier (29 frames for MPEG2) than
- * segment start to be able to decode the first frame we want.
- * 9 (29) frames are the theoretical maximum of frames that contain
- * data for the current frame (bit reservoir).
- *
- * lead_out:
- * Some mp3 streams have an offset in the timestamps, for which we have to
- * push the frame *after* the end position in order for the decoder to be
- * able to decode everything up until the segment.stop position. */
- gst_base_parse_set_frame_rate (parse, mp3parse->rate, mp3parse->spf,
- (version == 1) ? 10 : 30, 2);
- }
-
- mp3parse->hdr_bitrate = bitrate;
-
- /* For first frame; check for seek tables and output a codec tag */
- gst_mpeg_audio_parse_handle_first_frame (mp3parse, buf);
-
- /* store some frame info for later processing */
- mp3parse->last_crc = crc;
- mp3parse->last_mode = mode;
-
- return GST_FLOW_OK;
-
-/* ERRORS */
-broken_header:
- {
- /* this really shouldn't ever happen */
- GST_ELEMENT_ERROR (parse, STREAM, DECODE, (NULL), (NULL));
- return GST_FLOW_ERROR;
- }
+cleanup:
+ gst_buffer_unmap (buf, &map);
}
static gboolean
codec = g_strdup_printf ("MPEG %d Audio, Layer %d",
mp3parse->version, mp3parse->layer);
}
- taglist = gst_tag_list_new ();
- gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
- GST_TAG_AUDIO_CODEC, codec, NULL);
+ taglist = gst_tag_list_new (GST_TAG_AUDIO_CODEC, codec, NULL);
if (mp3parse->hdr_bitrate > 0 && mp3parse->xing_bitrate == 0 &&
mp3parse->vbri_bitrate == 0) {
/* We don't have a VBR bitrate, so post the available bitrate as
gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_NOMINAL_BITRATE, mp3parse->hdr_bitrate, NULL);
}
- gst_element_found_tags_for_pad (GST_ELEMENT (mp3parse),
- GST_BASE_PARSE_SRC_PAD (mp3parse), taglist);
+ gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (mp3parse),
+ gst_event_new_tag (taglist));
g_free (codec);
/* also signals the end of first-frame processing */
gboolean using_crc;
if (!taglist) {
- taglist = gst_tag_list_new ();
+ taglist = gst_tag_list_new_empty ();
}
mp3parse->last_posted_crc = mp3parse->last_crc;
if (mp3parse->last_posted_crc == CRC_PROTECTED) {
if (mp3parse->last_posted_channel_mode != mp3parse->last_mode) {
if (!taglist) {
- taglist = gst_tag_list_new ();
+ taglist = gst_tag_list_new_empty ();
}
mp3parse->last_posted_channel_mode = mp3parse->last_mode;
/* if the taglist exists, we need to send it */
if (taglist) {
- gst_element_found_tags_for_pad (GST_ELEMENT (mp3parse),
- GST_BASE_PARSE_SRC_PAD (mp3parse), taglist);
+ gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (mp3parse),
+ gst_event_new_tag (taglist));
}
/* usual clipping applies */
}
static GstCaps *
-gst_mpeg_audio_parse_get_sink_caps (GstBaseParse * parse)
+gst_mpeg_audio_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
{
GstCaps *peercaps;
GstCaps *res;
+ /* FIXME: handle filter caps */
+
peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
if (peercaps) {
guint i, n;
libgstauparse_la_SOURCES = gstauparse.c
libgstauparse_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
-libgstauparse_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS)
+libgstauparse_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR)
libgstauparse_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstauparse_la_LIBTOOLFLAGS = --tag=disable-static
GST_STATIC_CAPS ("audio/x-au")
);
+#define GST_AU_PARSE_RAW_PAD_TEMPLATE_CAPS \
+ "audio/x-raw, " \
+ "format= (string) { S8, S16LE, S16BE, S24LE, S24BE, " \
+ "S32LE, S32BE, F32LE, F32BE, " \
+ "F64LE, F64BE }, " \
+ "rate = (int) [ 8000, 192000 ], " \
+ "channels = (int) 1, " \
+ "layout = (string) interleaved;" \
+ "audio/x-raw, " \
+ "format= (string) { S8, S16LE, S16BE, S24LE, S24BE, " \
+ "S32LE, S32BE, F32LE, F32BE, " \
+ "F64LE, F64BE }, " \
+ "rate = (int) [ 8000, 192000 ], " \
+ "channels = (int) 2, " \
+ "channel-mask = (bitmask) 0x3," \
+ "layout = (string) interleaved"
+
#define GST_AU_PARSE_ALAW_PAD_TEMPLATE_CAPS \
"audio/x-alaw, " \
"rate = (int) [ 8000, 192000 ], " \
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
- GST_PAD_SOMETIMES,
- GST_STATIC_CAPS (GST_AUDIO_INT_PAD_TEMPLATE_CAPS "; "
- GST_AUDIO_FLOAT_PAD_TEMPLATE_CAPS ";"
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_AU_PARSE_RAW_PAD_TEMPLATE_CAPS "; "
GST_AU_PARSE_ALAW_PAD_TEMPLATE_CAPS ";"
GST_AU_PARSE_MULAW_PAD_TEMPLATE_CAPS ";"
GST_AU_PARSE_ADPCM_PAD_TEMPLATE_CAPS));
static void gst_au_parse_dispose (GObject * object);
-static GstFlowReturn gst_au_parse_chain (GstPad * pad, GstBuffer * buf);
+static GstFlowReturn gst_au_parse_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
static GstStateChangeReturn gst_au_parse_change_state (GstElement * element,
GstStateChange transition);
static void gst_au_parse_reset (GstAuParse * auparse);
-static gboolean gst_au_parse_remove_srcpad (GstAuParse * auparse);
-static gboolean gst_au_parse_add_srcpad (GstAuParse * auparse, GstCaps * caps);
-static gboolean gst_au_parse_src_query (GstPad * pad, GstQuery * query);
-static gboolean gst_au_parse_src_event (GstPad * pad, GstEvent * event);
-static gboolean gst_au_parse_sink_event (GstPad * pad, GstEvent * event);
+static gboolean gst_au_parse_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_au_parse_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_au_parse_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static gboolean gst_au_parse_src_convert (GstAuParse * auparse,
GstFormat src_format, gint64 srcval, GstFormat dest_format,
gint64 * destval);
-GST_BOILERPLATE (GstAuParse, gst_au_parse, GstElement, GST_TYPE_ELEMENT);
-
-static void
-gst_au_parse_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
- gst_element_class_set_details_simple (element_class, "AU audio demuxer",
- "Codec/Demuxer/Audio",
- "Parse an .au file into raw audio",
- "Erik Walthinsen <omega@cse.ogi.edu>");
-
- GST_DEBUG_CATEGORY_INIT (auparse_debug, "auparse", 0, ".au parser");
-}
+#define gst_au_parse_parent_class parent_class
+G_DEFINE_TYPE (GstAuParse, gst_au_parse, GST_TYPE_ELEMENT);
static void
gst_au_parse_class_init (GstAuParseClass * klass)
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
+ GST_DEBUG_CATEGORY_INIT (auparse_debug, "auparse", 0, ".au parser");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_au_parse_change_state);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_set_details_simple (gstelement_class,
+ "AU audio demuxer",
+ "Codec/Demuxer/Audio",
+ "Parse an .au file into raw audio",
+ "Erik Walthinsen <omega@cse.ogi.edu>");
}
static void
-gst_au_parse_init (GstAuParse * auparse, GstAuParseClass * klass)
+gst_au_parse_init (GstAuParse * auparse)
{
auparse->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
gst_pad_set_chain_function (auparse->sinkpad,
GST_DEBUG_FUNCPTR (gst_au_parse_sink_event));
gst_element_add_pad (GST_ELEMENT (auparse), auparse->sinkpad);
- auparse->srcpad = NULL;
+ auparse->srcpad = gst_pad_new_from_static_template (&src_template, "src");
+ gst_pad_set_query_function (auparse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_au_parse_src_query));
+ gst_pad_set_event_function (auparse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_au_parse_src_event));
+ gst_pad_use_fixed_caps (auparse->srcpad);
+ gst_element_add_pad (GST_ELEMENT (auparse), auparse->srcpad);
+
auparse->adapter = gst_adapter_new ();
gst_au_parse_reset (auparse);
}
static void
gst_au_parse_reset (GstAuParse * auparse)
{
- gst_au_parse_remove_srcpad (auparse);
-
auparse->offset = 0;
auparse->buffer_offset = 0;
auparse->encoding = 0;
/* gst_segment_init (&auparse->segment, GST_FORMAT_TIME); */
}
-static gboolean
-gst_au_parse_add_srcpad (GstAuParse * auparse, GstCaps * new_caps)
+static void
+gst_au_parse_negotiate_srcpad (GstAuParse * auparse, GstCaps * new_caps)
{
if (auparse->src_caps && gst_caps_is_equal (new_caps, auparse->src_caps)) {
GST_LOG_OBJECT (auparse, "same caps, nothing to do");
- return TRUE;
+ return;
}
gst_caps_replace (&auparse->src_caps, new_caps);
- if (auparse->srcpad != NULL) {
- GST_DEBUG_OBJECT (auparse, "Changing src pad caps to %" GST_PTR_FORMAT,
- auparse->src_caps);
- gst_pad_set_caps (auparse->srcpad, auparse->src_caps);
- }
-
- if (auparse->srcpad == NULL) {
- auparse->srcpad = gst_pad_new_from_static_template (&src_template, "src");
- g_return_val_if_fail (auparse->srcpad != NULL, FALSE);
-
-#if 0
- gst_pad_set_query_type_function (auparse->srcpad,
- GST_DEBUG_FUNCPTR (gst_au_parse_src_get_query_types));
-#endif
- gst_pad_set_query_function (auparse->srcpad,
- GST_DEBUG_FUNCPTR (gst_au_parse_src_query));
- gst_pad_set_event_function (auparse->srcpad,
- GST_DEBUG_FUNCPTR (gst_au_parse_src_event));
-
- gst_pad_use_fixed_caps (auparse->srcpad);
- gst_pad_set_active (auparse->srcpad, TRUE);
-
- if (auparse->src_caps)
- gst_pad_set_caps (auparse->srcpad, auparse->src_caps);
-
- GST_DEBUG_OBJECT (auparse, "Adding src pad with caps %" GST_PTR_FORMAT,
- auparse->src_caps);
+ GST_DEBUG_OBJECT (auparse, "Changing src pad caps to %" GST_PTR_FORMAT,
+ auparse->src_caps);
+ gst_pad_set_caps (auparse->srcpad, auparse->src_caps);
- gst_object_ref (auparse->srcpad);
- if (!gst_element_add_pad (GST_ELEMENT (auparse), auparse->srcpad))
- return FALSE;
- gst_element_no_more_pads (GST_ELEMENT (auparse));
- }
-
- return TRUE;
-}
-
-static gboolean
-gst_au_parse_remove_srcpad (GstAuParse * auparse)
-{
- gboolean res = TRUE;
-
- if (auparse->srcpad != NULL) {
- GST_DEBUG_OBJECT (auparse, "Removing src pad");
- res = gst_element_remove_pad (GST_ELEMENT (auparse), auparse->srcpad);
- g_return_val_if_fail (res != FALSE, FALSE);
- gst_object_unref (auparse->srcpad);
- auparse->srcpad = NULL;
- }
-
- return res;
+ return;
}
static GstFlowReturn
guint32 size;
guint8 *head;
gchar layout[7] = { 0, };
- gint law = 0, depth = 0, ieee = 0;
+ GstAudioFormat format = GST_AUDIO_FORMAT_UNKNOWN;
+ gint law = 0;
+ guint endianness;
- head = (guint8 *) gst_adapter_peek (auparse->adapter, 24);
+ head = (guint8 *) gst_adapter_map (auparse->adapter, 24);
g_assert (head != NULL);
GST_DEBUG_OBJECT (auparse, "[%c%c%c%c]", head[0], head[1], head[2], head[3]);
switch (GST_READ_UINT32_BE (head)) {
/* normal format is big endian (au is a Sparc format) */
case 0x2e736e64:{ /* ".snd" */
- auparse->endianness = G_BIG_ENDIAN;
+ endianness = G_BIG_ENDIAN;
break;
}
/* and of course, someone had to invent a little endian
* version. Used by DEC systems. */
case 0x646e732e: /* dns. */
case 0x0064732e:{ /* other source say it is "dns." */
- auparse->endianness = G_LITTLE_ENDIAN;
+ endianness = G_LITTLE_ENDIAN;
break;
}
default:{
switch (auparse->encoding) {
case 1: /* 8-bit ISDN mu-law G.711 */
law = 1;
- depth = 8;
break;
case 27: /* 8-bit ISDN A-law G.711 */
law = 2;
- depth = 8;
break;
- case 2: /* 8-bit linear PCM */
- depth = 8;
+ case 2: /* 8-bit linear PCM, FIXME signed? */
+ format = GST_AUDIO_FORMAT_S8;
+ auparse->sample_size = auparse->channels;
break;
case 3: /* 16-bit linear PCM */
- depth = 16;
+ if (endianness == G_LITTLE_ENDIAN)
+ format = GST_AUDIO_FORMAT_S16LE;
+ else
+ format = GST_AUDIO_FORMAT_S16BE;
+ auparse->sample_size = auparse->channels * 2;
break;
case 4: /* 24-bit linear PCM */
- depth = 24;
+ if (endianness == G_LITTLE_ENDIAN)
+ format = GST_AUDIO_FORMAT_S24LE;
+ else
+ format = GST_AUDIO_FORMAT_S24BE;
+ auparse->sample_size = auparse->channels * 3;
break;
case 5: /* 32-bit linear PCM */
- depth = 32;
+ if (endianness == G_LITTLE_ENDIAN)
+ format = GST_AUDIO_FORMAT_S32LE;
+ else
+ format = GST_AUDIO_FORMAT_S32BE;
+ auparse->sample_size = auparse->channels * 4;
break;
case 6: /* 32-bit IEEE floating point */
- ieee = 1;
- depth = 32;
+ if (endianness == G_LITTLE_ENDIAN)
+ format = GST_AUDIO_FORMAT_F32LE;
+ else
+ format = GST_AUDIO_FORMAT_F32BE;
+ auparse->sample_size = auparse->channels * 4;
break;
case 7: /* 64-bit IEEE floating point */
- ieee = 1;
- depth = 64;
+ if (endianness == G_LITTLE_ENDIAN)
+ format = GST_AUDIO_FORMAT_F64LE;
+ else
+ format = GST_AUDIO_FORMAT_F64BE;
+ auparse->sample_size = auparse->channels * 8;
break;
case 23: /* 4-bit CCITT G.721 ADPCM 32kbps -> modplug/libsndfile (compressed 8-bit mu-law) */
"rate", G_TYPE_INT, auparse->samplerate,
"channels", G_TYPE_INT, auparse->channels, NULL);
auparse->sample_size = auparse->channels;
- } else if (ieee) {
- tempcaps = gst_caps_new_simple ("audio/x-raw-float",
+ } else if (format != GST_AUDIO_FORMAT_UNKNOWN) {
+ const GstCaps *templ_caps = gst_pad_get_pad_template_caps (auparse->srcpad);
+ GstCaps *intersection;
+
+ tempcaps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
"rate", G_TYPE_INT, auparse->samplerate,
- "channels", G_TYPE_INT, auparse->channels,
- "endianness", G_TYPE_INT, auparse->endianness,
- "width", G_TYPE_INT, depth, NULL);
- auparse->sample_size = auparse->channels * depth / 8;
+ "channels", G_TYPE_INT, auparse->channels, NULL);
+
+ intersection = gst_caps_intersect (tempcaps, templ_caps);
+ gst_caps_unref (tempcaps);
+ tempcaps = intersection;
} else if (layout[0]) {
tempcaps = gst_caps_new_simple ("audio/x-adpcm",
"layout", G_TYPE_STRING, layout, NULL);
auparse->sample_size = 0;
- } else {
- tempcaps = gst_caps_new_simple ("audio/x-raw-int",
- "rate", G_TYPE_INT, auparse->samplerate,
- "channels", G_TYPE_INT, auparse->channels,
- "endianness", G_TYPE_INT, auparse->endianness,
- "depth", G_TYPE_INT, depth, "width", G_TYPE_INT, depth,
- /* FIXME: signed TRUE even for 8-bit PCM? */
- "signed", G_TYPE_BOOLEAN, TRUE, NULL);
- auparse->sample_size = auparse->channels * depth / 8;
- }
+ } else
+ goto unknown_format;
GST_DEBUG_OBJECT (auparse, "sample_size=%d", auparse->sample_size);
- if (!gst_au_parse_add_srcpad (auparse, tempcaps))
- goto add_pad_failed;
+ gst_au_parse_negotiate_srcpad (auparse, tempcaps);
GST_DEBUG_OBJECT (auparse, "offset=%" G_GINT64_FORMAT, auparse->offset);
+ gst_adapter_unmap (auparse->adapter);
gst_adapter_flush (auparse->adapter, auparse->offset);
gst_caps_unref (tempcaps);
/* ERRORS */
unknown_header:
{
+ gst_adapter_unmap (auparse->adapter);
GST_ELEMENT_ERROR (auparse, STREAM, WRONG_TYPE, (NULL), (NULL));
return GST_FLOW_ERROR;
}
unsupported_sample_rate:
{
+ gst_adapter_unmap (auparse->adapter);
GST_ELEMENT_ERROR (auparse, STREAM, FORMAT, (NULL),
("Unsupported samplerate: %u", auparse->samplerate));
return GST_FLOW_ERROR;
}
unsupported_number_of_channels:
{
+ gst_adapter_unmap (auparse->adapter);
GST_ELEMENT_ERROR (auparse, STREAM, FORMAT, (NULL),
("Unsupported number of channels: %u", auparse->channels));
return GST_FLOW_ERROR;
}
unknown_format:
{
+ gst_adapter_unmap (auparse->adapter);
GST_ELEMENT_ERROR (auparse, STREAM, FORMAT, (NULL),
("Unsupported encoding: %u", auparse->encoding));
return GST_FLOW_ERROR;
}
-add_pad_failed:
- {
- GST_ELEMENT_ERROR (auparse, STREAM, FAILED, (NULL),
- ("Failed to add srcpad"));
- gst_caps_unref (tempcaps);
- return GST_FLOW_ERROR;
- }
}
#define AU_HEADER_SIZE 24
static GstFlowReturn
-gst_au_parse_chain (GstPad * pad, GstBuffer * buf)
+gst_au_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstFlowReturn ret = GST_FLOW_OK;
GstAuParse *auparse;
gint64 timestamp;
gint64 duration;
gint64 offset;
+ GstSegment segment;
- auparse = GST_AU_PARSE (gst_pad_get_parent (pad));
+ auparse = GST_AU_PARSE (parent);
- GST_LOG_OBJECT (auparse, "got buffer of size %u", GST_BUFFER_SIZE (buf));
+ GST_LOG_OBJECT (auparse, "got buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buf));
gst_adapter_push (auparse->adapter, buf);
buf = NULL;
/* if we haven't seen any data yet... */
- if (auparse->srcpad == NULL) {
+ if (!gst_pad_has_current_caps (auparse->srcpad)) {
if (gst_adapter_available (auparse->adapter) < AU_HEADER_SIZE) {
GST_DEBUG_OBJECT (auparse, "need more data to parse header");
ret = GST_FLOW_OK;
if (ret != GST_FLOW_OK)
goto out;
- gst_pad_push_event (auparse->srcpad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
- 0, GST_CLOCK_TIME_NONE, 0));
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ gst_pad_push_event (auparse->srcpad, gst_event_new_segment (&segment));
}
avail = gst_adapter_available (auparse->adapter);
if (sendnow > 0) {
GstBuffer *outbuf;
- const guint8 *data;
gint64 pos;
- ret = gst_pad_alloc_buffer_and_set_caps (auparse->srcpad,
- auparse->buffer_offset, sendnow, GST_PAD_CAPS (auparse->srcpad),
- &outbuf);
-
- if (ret != GST_FLOW_OK) {
- GST_DEBUG_OBJECT (auparse, "pad alloc flow: %s", gst_flow_get_name (ret));
- goto out;
- }
-
- data = gst_adapter_peek (auparse->adapter, sendnow);
- memcpy (GST_BUFFER_DATA (outbuf), data, sendnow);
- gst_adapter_flush (auparse->adapter, sendnow);
+ outbuf = gst_adapter_take_buffer (auparse->adapter, sendnow);
+ outbuf = gst_buffer_make_writable (outbuf);
pos = auparse->buffer_offset - auparse->offset;
pos = MAX (pos, 0);
out:
- gst_object_unref (auparse);
return ret;
}
}
static gboolean
-gst_au_parse_src_query (GstPad * pad, GstQuery * query)
+gst_au_parse_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
GstAuParse *auparse;
gboolean ret = FALSE;
- auparse = GST_AU_PARSE (gst_pad_get_parent (pad));
+ auparse = GST_AU_PARSE (parent);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_DURATION:{
- GstFormat bformat = GST_FORMAT_BYTES;
GstFormat format;
gint64 len, val;
gst_query_parse_duration (query, &format, NULL);
- if (!gst_pad_query_peer_duration (auparse->sinkpad, &bformat, &len)) {
+ if (!gst_pad_peer_query_duration (auparse->sinkpad, GST_FORMAT_BYTES,
+ &len)) {
GST_DEBUG_OBJECT (auparse, "failed to query upstream length");
break;
}
len -= auparse->offset;
GST_OBJECT_UNLOCK (auparse);
- ret = gst_au_parse_src_convert (auparse, bformat, len, format, &val);
+ ret =
+ gst_au_parse_src_convert (auparse, GST_FORMAT_BYTES, len, format,
+ &val);
if (ret) {
gst_query_set_duration (query, format, val);
break;
}
case GST_QUERY_POSITION:{
- GstFormat bformat = GST_FORMAT_BYTES;
GstFormat format;
gint64 pos, val;
gst_query_parse_position (query, &format, NULL);
- if (!gst_pad_query_peer_position (auparse->sinkpad, &bformat, &pos)) {
+ if (!gst_pad_peer_query_position (auparse->sinkpad, GST_FORMAT_BYTES,
+ &pos)) {
GST_DEBUG_OBJECT (auparse, "failed to query upstream position");
break;
}
break;
}
default:
- ret = gst_pad_query_default (pad, query);
+ ret = gst_pad_query_default (pad, parent, query);
break;
}
- gst_object_unref (auparse);
return ret;
}
}
static gboolean
-gst_au_parse_sink_event (GstPad * pad, GstEvent * event)
+gst_au_parse_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstAuParse *auparse;
gboolean ret = TRUE;
- auparse = GST_AU_PARSE (gst_pad_get_parent (pad));
+ auparse = GST_AU_PARSE (parent);
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_CAPS:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time, offset = 0;
- gboolean update;
+ /* discard, we'll come up with proper src caps */
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ gint64 start, stop, offset = 0;
GstSegment segment;
GstEvent *new_event = NULL;
- gst_segment_init (&segment, GST_FORMAT_UNDEFINED);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
- gst_segment_set_newsegment_full (&segment, update, rate, arate, format,
- start, stop, time);
+ /* some debug output */
+ gst_event_copy_segment (event, &segment);
+ GST_DEBUG_OBJECT (auparse, "received newsegment %" GST_SEGMENT_FORMAT,
+ &segment);
+ start = segment.start;
+ stop = segment.stop;
if (auparse->sample_size > 0) {
if (start > 0) {
offset = start;
GST_FORMAT_TIME, &stop);
}
- if (auparse->srcpad) {
- GST_INFO_OBJECT (auparse,
- "new segment: %" GST_TIME_FORMAT " ... %" GST_TIME_FORMAT,
- GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+ GST_INFO_OBJECT (auparse,
+ "new segment: %" GST_TIME_FORMAT " ... %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
- new_event = gst_event_new_new_segment_full (update, rate, arate,
- GST_FORMAT_TIME, start, stop, start);
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ segment.start = segment.time = start;
+ segment.stop = stop;
+ new_event = gst_event_new_segment (&segment);
- ret = gst_pad_push_event (auparse->srcpad, new_event);
- }
+ ret = gst_pad_push_event (auparse->srcpad, new_event);
auparse->buffer_offset = offset;
}
/* fall-through */
default:
- ret = gst_pad_event_default (pad, event);
+ ret = gst_pad_event_default (pad, parent, event);
break;
}
- gst_object_unref (auparse);
return ret;
}
static gboolean
-gst_au_parse_src_event (GstPad * pad, GstEvent * event)
+gst_au_parse_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstAuParse *auparse;
gboolean ret;
- auparse = GST_AU_PARSE (gst_pad_get_parent (pad));
+ auparse = GST_AU_PARSE (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:
ret = gst_au_parse_handle_seek (auparse, event);
break;
default:
- ret = gst_pad_event_default (pad, event);
+ ret = gst_pad_event_default (pad, parent, event);
break;
}
- gst_object_unref (auparse);
return ret;
}
GstAuParse *auparse = GST_AU_PARSE (element);
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- ret = parent_class->change_state (element, transition);
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
if (ret == GST_STATE_CHANGE_FAILURE)
return ret;
guint sample_size;
guint encoding;
guint samplerate;
- guint endianness;
guint channels;
};
static void gst_auto_audio_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoAudioSink, gst_auto_audio_sink, GstBin, GST_TYPE_BIN);
+#define gst_auto_audio_sink_parent_class parent_class
+G_DEFINE_TYPE (GstAutoAudioSink, gst_auto_audio_sink, GST_TYPE_BIN);
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_audio_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (eklass, &sink_template);
-
- gst_element_class_set_details_simple (eklass, "Auto audio sink",
- "Sink/Audio",
- "Wrapper audio sink for automatically detected audio sink",
- "Jan Schmidt <thaytan@noraisin.net>");
-}
-
-static void
gst_auto_audio_sink_class_init (GstAutoAudioSinkClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter sink candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (eklass, "Auto audio sink",
+ "Sink/Audio",
+ "Wrapper audio sink for automatically detected audio sink",
+ "Jan Schmidt <thaytan@noraisin.net>");
}
static void
gst_bin_remove (GST_BIN (sink), sink->kid);
sink->kid = NULL;
/* Don't lose the SINK flag */
- GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_IS_SINK);
+ GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_FLAG_SINK);
}
}
gst_object_unref (targetpad);
}
-static GstStaticCaps raw_caps =
- GST_STATIC_CAPS ("audio/x-raw-int; audio/x-raw-float");
+static GstStaticCaps raw_caps = GST_STATIC_CAPS ("audio/x-raw");
static void
-gst_auto_audio_sink_init (GstAutoAudioSink * sink,
- GstAutoAudioSinkClass * g_class)
+gst_auto_audio_sink_init (GstAutoAudioSink * sink)
{
sink->pad = gst_ghost_pad_new_no_target ("sink", GST_PAD_SINK);
gst_element_add_pad (GST_ELEMENT (sink), sink->pad);
sink->filter_caps = gst_static_caps_get (&raw_caps);
/* mark as sink */
- GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_IS_SINK);
+ GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_FLAG_SINK);
}
static gboolean
GstElement *element;
gchar *name, *marker;
- marker = g_strdup (GST_PLUGIN_FEATURE (factory)->name);
+ marker = g_strdup (GST_OBJECT_NAME (factory));
if (g_str_has_suffix (marker, "sink"))
marker[strlen (marker) - 4] = '\0';
if (g_str_has_prefix (marker, "gst"))
GstCaps *el_caps = NULL;
gboolean no_match = TRUE;
- list = gst_registry_feature_filter (gst_registry_get_default (),
+ list = gst_registry_feature_filter (gst_registry_get (),
(GstPluginFeatureFilter) gst_auto_audio_sink_factory_filter, FALSE, sink);
list = g_list_sort (list, (GCompareFunc) gst_auto_audio_sink_compare_ranks);
if ((el = gst_auto_audio_sink_create_element_with_pretty_name (sink, f))) {
GstStateChangeReturn ret;
- GST_DEBUG_OBJECT (sink, "Testing %s", GST_PLUGIN_FEATURE (f)->name);
+ GST_DEBUG_OBJECT (sink, "Testing %s", GST_OBJECT_NAME (f));
/* If autoaudiosink has been provided with filter caps,
* accept only sinks that match with the filter caps */
if (sink->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "sink");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_query_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (sink,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
static void gst_auto_audio_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoAudioSrc, gst_auto_audio_src, GstBin, GST_TYPE_BIN);
+#define gst_auto_audio_src_parent_class parent_class
+G_DEFINE_TYPE (GstAutoAudioSrc, gst_auto_audio_src, GST_TYPE_BIN);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_audio_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (eklass, &src_template);
-
- gst_element_class_set_details_simple (eklass, "Auto audio source",
- "Source/Audio",
- "Wrapper audio source for automatically detected audio source",
- "Jan Schmidt <thaytan@noraisin.net>, "
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_auto_audio_src_class_init (GstAutoAudioSrcClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter sink candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (eklass, "Auto audio source",
+ "Source/Audio",
+ "Wrapper audio source for automatically detected audio source",
+ "Jan Schmidt <thaytan@noraisin.net>, "
+ "Stefan Kost <ensonic@users.sf.net>");
}
static void
src->kid = NULL;
/* Don't lose SOURCE flag */
- GST_OBJECT_FLAG_SET (src, GST_ELEMENT_IS_SOURCE);
+ GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);
}
}
gst_object_unref (targetpad);
}
-static GstStaticCaps raw_caps =
- GST_STATIC_CAPS ("audio/x-raw-int; audio/x-raw-float");
+static GstStaticCaps raw_caps = GST_STATIC_CAPS ("audio/x-raw");
static void
-gst_auto_audio_src_init (GstAutoAudioSrc * src, GstAutoAudioSrcClass * g_class)
+gst_auto_audio_src_init (GstAutoAudioSrc * src)
{
src->pad = gst_ghost_pad_new_no_target ("src", GST_PAD_SRC);
gst_element_add_pad (GST_ELEMENT (src), src->pad);
src->filter_caps = gst_static_caps_get (&raw_caps);
/* mark as source */
- GST_OBJECT_FLAG_SET (src, GST_ELEMENT_IS_SOURCE);
+ GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);
}
static gboolean
GstElement *element;
gchar *name, *marker;
- marker = g_strdup (GST_PLUGIN_FEATURE (factory)->name);
+ marker = g_strdup (GST_OBJECT_NAME (factory));
if (g_str_has_suffix (marker, "src"))
marker[strlen (marker) - 4] = '\0';
if (g_str_has_prefix (marker, "gst"))
GstCaps *el_caps = NULL;
gboolean no_match = TRUE;
- list = gst_registry_feature_filter (gst_registry_get_default (),
+ list = gst_registry_feature_filter (gst_registry_get (),
(GstPluginFeatureFilter) gst_auto_audio_src_factory_filter, FALSE, src);
list = g_list_sort (list, (GCompareFunc) gst_auto_audio_src_compare_ranks);
if ((el = gst_auto_audio_src_create_element_with_pretty_name (src, f))) {
GstStateChangeReturn ret;
- GST_DEBUG_OBJECT (src, "Testing %s", GST_PLUGIN_FEATURE (f)->name);
+ GST_DEBUG_OBJECT (src, "Testing %s", GST_OBJECT_NAME (f));
/* If autoAudioSrc has been provided with filter caps,
* accept only sources that match with the filter caps */
if (src->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "src");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_query_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (src,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
static void gst_auto_video_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoVideoSink, gst_auto_video_sink, GstBin, GST_TYPE_BIN);
+#define gst_auto_video_sink_parent_class parent_class
+G_DEFINE_TYPE (GstAutoVideoSink, gst_auto_video_sink, GST_TYPE_BIN);
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_video_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (eklass, &sink_template);
- gst_element_class_set_details_simple (eklass, "Auto video sink",
- "Sink/Video",
- "Wrapper video sink for automatically detected video sink",
- "Jan Schmidt <thaytan@noraisin.net>");
-}
-
-static void
gst_auto_video_sink_class_init (GstAutoVideoSinkClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter sink candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_set_details_simple (eklass, "Auto video sink",
+ "Sink/Video",
+ "Wrapper video sink for automatically detected video sink",
+ "Jan Schmidt <thaytan@noraisin.net>");
}
static void
gst_bin_remove (GST_BIN (sink), sink->kid);
sink->kid = NULL;
/* Don't lose the SINK flag */
- GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_IS_SINK);
+ GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_FLAG_SINK);
}
}
gst_object_unref (targetpad);
}
-static GstStaticCaps raw_caps =
- GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb");
+static GstStaticCaps raw_caps = GST_STATIC_CAPS ("video/x-raw");
static void
-gst_auto_video_sink_init (GstAutoVideoSink * sink,
- GstAutoVideoSinkClass * g_class)
+gst_auto_video_sink_init (GstAutoVideoSink * sink)
{
sink->pad = gst_ghost_pad_new_no_target ("sink", GST_PAD_SINK);
gst_element_add_pad (GST_ELEMENT (sink), sink->pad);
sink->filter_caps = gst_static_caps_get (&raw_caps);
/* mark as sink */
- GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_IS_SINK);
+ GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_FLAG_SINK);
}
static gboolean
GstElement *element;
gchar *name, *marker;
- marker = g_strdup (GST_PLUGIN_FEATURE (factory)->name);
+ marker = g_strdup (GST_OBJECT_NAME (factory));
if (g_str_has_suffix (marker, "sink"))
marker[strlen (marker) - 4] = '\0';
if (g_str_has_prefix (marker, "gst"))
GstCaps *el_caps = NULL;
gboolean no_match = TRUE;
- list = gst_registry_feature_filter (gst_registry_get_default (),
+ list = gst_registry_feature_filter (gst_registry_get (),
(GstPluginFeatureFilter) gst_auto_video_sink_factory_filter, FALSE, sink);
list = g_list_sort (list, (GCompareFunc) gst_auto_video_sink_compare_ranks);
if ((el = gst_auto_video_sink_create_element_with_pretty_name (sink, f))) {
GstStateChangeReturn ret;
- GST_DEBUG_OBJECT (sink, "Testing %s", GST_PLUGIN_FEATURE (f)->name);
+ GST_DEBUG_OBJECT (sink, "Testing %s", GST_OBJECT_NAME (f));
/* If autovideosink has been provided with filter caps,
* accept only sinks that match with the filter caps */
if (sink->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "sink");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_query_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (sink,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
static void gst_auto_video_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoVideoSrc, gst_auto_video_src, GstBin, GST_TYPE_BIN);
+#define gst_auto_video_src_parent_class parent_class
+G_DEFINE_TYPE (GstAutoVideoSrc, gst_auto_video_src, GST_TYPE_BIN);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_video_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (eklass, &src_template);
- gst_element_class_set_details_simple (eklass, "Auto video source",
- "Source/Video",
- "Wrapper video source for automatically detected video source",
- "Jan Schmidt <thaytan@noraisin.net>, "
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_auto_video_src_class_init (GstAutoVideoSrcClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter src candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_set_details_simple (eklass, "Auto video source",
+ "Source/Video",
+ "Wrapper video source for automatically detected video source",
+ "Jan Schmidt <thaytan@noraisin.net>, "
+ "Stefan Kost <ensonic@users.sf.net>");
}
static void
gst_bin_remove (GST_BIN (src), src->kid);
src->kid = NULL;
/* Don't loose SOURCE flag */
- GST_OBJECT_FLAG_SET (src, GST_ELEMENT_IS_SOURCE);
+ GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);
}
}
gst_object_unref (targetpad);
}
-static GstStaticCaps raw_caps =
- GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb");
+static GstStaticCaps raw_caps = GST_STATIC_CAPS ("video/x-raw");
static void
-gst_auto_video_src_init (GstAutoVideoSrc * src, GstAutoVideoSrcClass * g_class)
+gst_auto_video_src_init (GstAutoVideoSrc * src)
{
src->pad = gst_ghost_pad_new_no_target ("src", GST_PAD_SRC);
gst_element_add_pad (GST_ELEMENT (src), src->pad);
src->filter_caps = gst_static_caps_get (&raw_caps);
/* mark as source */
- GST_OBJECT_FLAG_SET (src, GST_ELEMENT_IS_SOURCE);
+ GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);
}
static gboolean
GstElement *element;
gchar *name, *marker;
- marker = g_strdup (GST_PLUGIN_FEATURE (factory)->name);
+ marker = g_strdup (GST_OBJECT_NAME (factory));
if (g_str_has_suffix (marker, "src"))
marker[strlen (marker) - 4] = '\0';
if (g_str_has_prefix (marker, "gst"))
GstCaps *el_caps = NULL;
gboolean no_match = TRUE;
- list = gst_registry_feature_filter (gst_registry_get_default (),
+ list = gst_registry_feature_filter (gst_registry_get (),
(GstPluginFeatureFilter) gst_auto_video_src_factory_filter, FALSE, src);
list = g_list_sort (list, (GCompareFunc) gst_auto_video_src_compare_ranks);
if ((el = gst_auto_video_src_create_element_with_pretty_name (src, f))) {
GstStateChangeReturn ret;
- GST_DEBUG_OBJECT (src, "Testing %s", GST_PLUGIN_FEATURE (f)->name);
+ GST_DEBUG_OBJECT (src, "Testing %s", GST_OBJECT_NAME (f));
/* If AutoVideoSrc has been provided with filter caps,
* accept only sources that match with the filter caps */
if (src->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "src");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_query_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (src,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
$(GST_LIBS) \
-lgstriff-@GST_MAJORMINOR@ \
-lgstaudio-@GST_MAJORMINOR@ \
- -lgsttag-@GST_MAJORMINOR@
+ -lgsttag-@GST_MAJORMINOR@ \
+ -lgstvideo-@GST_MAJORMINOR@
libgstavi_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstavi_la_LIBTOOLFLAGS = --tag=disable-static
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <string.h>
#include <stdio.h>
GST_STATIC_CAPS ("video/x-msvideo")
);
-static void gst_avi_demux_base_init (GstAviDemuxClass * klass);
-static void gst_avi_demux_class_init (GstAviDemuxClass * klass);
-static void gst_avi_demux_init (GstAviDemux * avi);
static void gst_avi_demux_finalize (GObject * object);
static void gst_avi_demux_reset (GstAviDemux * avi);
#if 0
static const GstEventMask *gst_avi_demux_get_event_mask (GstPad * pad);
#endif
-static gboolean gst_avi_demux_handle_src_event (GstPad * pad, GstEvent * event);
+static gboolean gst_avi_demux_handle_src_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
static gboolean gst_avi_demux_handle_sink_event (GstPad * pad,
- GstEvent * event);
+ GstObject * parent, GstEvent * event);
static gboolean gst_avi_demux_push_event (GstAviDemux * avi, GstEvent * event);
#if 0
static const GstFormat *gst_avi_demux_get_src_formats (GstPad * pad);
#endif
-static const GstQueryType *gst_avi_demux_get_src_query_types (GstPad * pad);
-static gboolean gst_avi_demux_handle_src_query (GstPad * pad, GstQuery * query);
+static gboolean gst_avi_demux_handle_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
static gboolean gst_avi_demux_src_convert (GstPad * pad, GstFormat src_format,
gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
static gboolean gst_avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad,
GstEvent * event);
static void gst_avi_demux_loop (GstPad * pad);
-static gboolean gst_avi_demux_sink_activate (GstPad * sinkpad);
-static gboolean gst_avi_demux_sink_activate_pull (GstPad * sinkpad,
- gboolean active);
-static gboolean gst_avi_demux_activate_push (GstPad * pad, gboolean active);
-static GstFlowReturn gst_avi_demux_chain (GstPad * pad, GstBuffer * buf);
-
+static gboolean gst_avi_demux_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+static gboolean gst_avi_demux_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+static GstFlowReturn gst_avi_demux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+#if 0
static void gst_avi_demux_set_index (GstElement * element, GstIndex * index);
static GstIndex *gst_avi_demux_get_index (GstElement * element);
+#endif
static GstStateChangeReturn gst_avi_demux_change_state (GstElement * element,
GstStateChange transition);
static void gst_avi_demux_calculate_durations_from_index (GstAviDemux * avi);
static void gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf);
-static GstElementClass *parent_class = NULL;
-
/* GObject methods */
-GType
-gst_avi_demux_get_type (void)
-{
- static GType avi_demux_type = 0;
-
- if (!avi_demux_type) {
- static const GTypeInfo avi_demux_info = {
- sizeof (GstAviDemuxClass),
- (GBaseInitFunc) gst_avi_demux_base_init,
- NULL,
- (GClassInitFunc) gst_avi_demux_class_init,
- NULL,
- NULL,
- sizeof (GstAviDemux),
- 0,
- (GInstanceInitFunc) gst_avi_demux_init,
- };
-
- avi_demux_type =
- g_type_register_static (GST_TYPE_ELEMENT,
- "GstAviDemux", &avi_demux_info, 0);
- }
-
- return avi_demux_type;
-}
+#define gst_avi_demux_parent_class parent_class
+G_DEFINE_TYPE (GstAviDemux, gst_avi_demux, GST_TYPE_ELEMENT);
static void
-gst_avi_demux_base_init (GstAviDemuxClass * klass)
+gst_avi_demux_class_init (GstAviDemuxClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *gobject_class = (GObjectClass *) klass;
GstPadTemplate *videosrctempl, *audiosrctempl, *subsrctempl;
GstCaps *audcaps, *vidcaps, *subcaps;
+ GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux",
+ 0, "Demuxer for AVI streams");
+
+ gobject_class->finalize = gst_avi_demux_finalize;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_avi_demux_change_state);
+#if 0
+ gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_avi_demux_set_index);
+ gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_avi_demux_get_index);
+#endif
+
audcaps = gst_riff_create_audio_template_caps ();
- gst_caps_append (audcaps, gst_caps_new_simple ("audio/x-avi-unknown", NULL));
- audiosrctempl = gst_pad_template_new ("audio_%02d",
+ gst_caps_append (audcaps, gst_caps_new_empty_simple ("audio/x-avi-unknown"));
+ audiosrctempl = gst_pad_template_new ("audio_%u",
GST_PAD_SRC, GST_PAD_SOMETIMES, audcaps);
vidcaps = gst_riff_create_video_template_caps ();
gst_caps_append (vidcaps, gst_riff_create_iavs_template_caps ());
- gst_caps_append (vidcaps, gst_caps_new_simple ("video/x-avi-unknown", NULL));
- videosrctempl = gst_pad_template_new ("video_%02d",
+ gst_caps_append (vidcaps, gst_caps_new_empty_simple ("video/x-avi-unknown"));
+ videosrctempl = gst_pad_template_new ("video_%u",
GST_PAD_SRC, GST_PAD_SOMETIMES, vidcaps);
- subcaps = gst_caps_new_simple ("application/x-subtitle-avi", NULL);
- subsrctempl = gst_pad_template_new ("subtitle_%02d",
+ subcaps = gst_caps_new_empty_simple ("application/x-subtitle-avi");
+ subsrctempl = gst_pad_template_new ("subtitle_%u",
GST_PAD_SRC, GST_PAD_SOMETIMES, subcaps);
- gst_element_class_add_pad_template (element_class, audiosrctempl);
- gst_element_class_add_pad_template (element_class, videosrctempl);
- gst_element_class_add_pad_template (element_class, subsrctempl);
- gst_element_class_add_static_pad_template (element_class, &sink_templ);
- gst_object_unref (audiosrctempl);
- gst_object_unref (videosrctempl);
- gst_object_unref (subsrctempl);
- gst_element_class_set_details_simple (element_class, "Avi demuxer",
+ gst_element_class_add_pad_template (gstelement_class, audiosrctempl);
+ gst_element_class_add_pad_template (gstelement_class, videosrctempl);
+ gst_element_class_add_pad_template (gstelement_class, subsrctempl);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_templ));
+
+ gst_element_class_set_details_simple (gstelement_class, "Avi demuxer",
"Codec/Demuxer",
"Demultiplex an avi file into audio and video",
"Erik Walthinsen <omega@cse.ogi.edu>, "
}
static void
-gst_avi_demux_class_init (GstAviDemuxClass * klass)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
- GObjectClass *gobject_class = (GObjectClass *) klass;
-
- GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux",
- 0, "Demuxer for AVI streams");
-
- parent_class = g_type_class_peek_parent (klass);
-
- gobject_class->finalize = gst_avi_demux_finalize;
- gstelement_class->change_state =
- GST_DEBUG_FUNCPTR (gst_avi_demux_change_state);
-
- gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_avi_demux_set_index);
- gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_avi_demux_get_index);
-}
-
-static void
gst_avi_demux_init (GstAviDemux * avi)
{
avi->sinkpad = gst_pad_new_from_static_template (&sink_templ, "sink");
gst_pad_set_activate_function (avi->sinkpad,
GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate));
- gst_pad_set_activatepull_function (avi->sinkpad,
- GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate_pull));
- gst_pad_set_activatepush_function (avi->sinkpad,
- GST_DEBUG_FUNCPTR (gst_avi_demux_activate_push));
+ gst_pad_set_activatemode_function (avi->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate_mode));
gst_pad_set_chain_function (avi->sinkpad,
GST_DEBUG_FUNCPTR (gst_avi_demux_chain));
gst_pad_set_event_function (avi->sinkpad,
avi->adapter = gst_adapter_new ();
gst_avi_demux_reset (avi);
+
+ GST_OBJECT_FLAG_SET (avi, GST_ELEMENT_FLAG_INDEXABLE);
}
static void
g_free (avi->avih);
avi->avih = NULL;
+#if 0
if (avi->element_index)
gst_object_unref (avi->element_index);
avi->element_index = NULL;
+#endif
- if (avi->close_seg_event) {
- gst_event_unref (avi->close_seg_event);
- avi->close_seg_event = NULL;
- }
if (avi->seg_event) {
gst_event_unref (avi->seg_event);
avi->seg_event = NULL;
return res;
}
-static const GstQueryType *
-gst_avi_demux_get_src_query_types (GstPad * pad)
-{
- static const GstQueryType src_types[] = {
- GST_QUERY_POSITION,
- GST_QUERY_DURATION,
- GST_QUERY_SEEKING,
- GST_QUERY_CONVERT,
- 0
- };
-
- return src_types;
-}
-
static gboolean
-gst_avi_demux_handle_src_query (GstPad * pad, GstQuery * query)
+gst_avi_demux_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
gboolean res = TRUE;
- GstAviDemux *avi = GST_AVI_DEMUX (gst_pad_get_parent (pad));
+ GstAviDemux *avi = GST_AVI_DEMUX (parent);
GstAviStream *stream = gst_pad_get_element_private (pad);
if (!stream->strh || !stream->strf.data)
- return gst_pad_query_default (pad, query);
+ return gst_pad_query_default (pad, parent, query);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_POSITION:{
if (stream->idx_n > 0)
gst_query_set_duration (query, fmt, stream->idx_n);
else if (gst_pad_query_convert (pad, GST_FORMAT_TIME,
- duration, &fmt, &dur))
+ duration, fmt, &dur))
gst_query_set_duration (query, fmt, dur);
break;
}
&dest_val)))
gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
else
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
- gst_object_unref (avi);
return res;
}
}
#endif
+#if 0
static guint64
gst_avi_demux_seek_streams (GstAviDemux * avi, guint64 offset, gboolean before)
{
return min;
}
+#endif
static guint
gst_avi_demux_index_entry_offset_search (GstAviIndexEntry * entry,
#define GST_AVI_SEEK_PUSH_DISPLACE (4 * GST_SECOND)
static gboolean
-gst_avi_demux_handle_sink_event (GstPad * pad, GstEvent * event)
+gst_avi_demux_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
gboolean res = TRUE;
- GstAviDemux *avi = GST_AVI_DEMUX (gst_pad_get_parent (pad));
+ GstAviDemux *avi = GST_AVI_DEMUX (parent);
GST_DEBUG_OBJECT (avi,
"have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event);
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time, offset = 0;
- gboolean update;
+ gint64 boffset, offset = 0;
GstSegment segment;
/* some debug output */
- gst_segment_init (&segment, GST_FORMAT_UNDEFINED);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
- gst_segment_set_newsegment_full (&segment, update, rate, arate, format,
- start, stop, time);
- GST_DEBUG_OBJECT (avi,
- "received format %d newsegment %" GST_SEGMENT_FORMAT, format,
+ gst_event_copy_segment (event, &segment);
+ GST_DEBUG_OBJECT (avi, "received newsegment %" GST_SEGMENT_FORMAT,
&segment);
/* chain will send initial newsegment after pads have been added */
}
/* we only expect a BYTE segment, e.g. following a seek */
- if (format != GST_FORMAT_BYTES) {
+ if (segment.format != GST_FORMAT_BYTES) {
GST_DEBUG_OBJECT (avi, "unsupported segment format, ignoring");
goto exit;
}
GstAviStream *stream;
/* compensate chunk header, stored index offset points after header */
- start += 8;
+ boffset = segment.start + 8;
/* find which stream we're on */
do {
stream = &avi->stream[i];
entry = gst_util_array_binary_search (stream->index,
stream->idx_n, sizeof (GstAviIndexEntry),
(GCompareDataFunc) gst_avi_demux_index_entry_offset_search,
- GST_SEARCH_MODE_AFTER, &start, NULL);
+ GST_SEARCH_MODE_AFTER, &boffset, NULL);
if (entry == NULL)
continue;
k = i;
}
/* exact match needs no further searching */
- if (stream->index[index].offset == start)
+ if (stream->index[index].offset == segment.start)
break;
} while (++i < avi->num_streams);
- start -= 8;
+ boffset -= 8;
offset -= 8;
stream = &avi->stream[k];
/* get the ts corresponding to start offset bytes for the stream */
gst_avi_demux_get_buffer_info (avi, stream, index,
- (GstClockTime *) & time, NULL, NULL, NULL);
+ (GstClockTime *) & segment.time, NULL, NULL, NULL);
+#if 0
} else if (avi->element_index) {
GstIndexEntry *entry;
/* Let's check if we have an index entry for this position */
entry = gst_index_get_assoc_entry (avi->element_index, avi->index_id,
GST_INDEX_LOOKUP_AFTER, GST_ASSOCIATION_FLAG_NONE,
- GST_FORMAT_BYTES, start);
+ GST_FORMAT_BYTES, segment.start);
/* we can not go where we have not yet been before ... */
if (!entry) {
goto eos;
}
- gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME,
+ (gint64 *) & segment.time);
gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &offset);
+#endif
} else {
GST_WARNING_OBJECT (avi, "no index data, forcing EOS");
goto eos;
}
- stop = GST_CLOCK_TIME_NONE;
+ segment.format = GST_FORMAT_TIME;
+ segment.start = segment.time;
+ segment.stop = GST_CLOCK_TIME_NONE;
+ segment.position = segment.start;
+
+ /* rescue duration */
+ segment.duration = avi->segment.duration;
/* set up segment and send downstream */
- gst_segment_set_newsegment_full (&avi->segment, update, rate, arate,
- GST_FORMAT_TIME, time, stop, time);
- GST_DEBUG_OBJECT (avi, "Pushing newseg update %d, rate %g, "
- "applied rate %g, format %d, start %" G_GINT64_FORMAT ", "
- "stop %" G_GINT64_FORMAT, update, rate, arate, GST_FORMAT_TIME,
- time, stop);
- gst_avi_demux_push_event (avi,
- gst_event_new_new_segment_full (update, rate, arate, GST_FORMAT_TIME,
- time, stop, time));
-
- GST_DEBUG_OBJECT (avi, "next chunk expected at %" G_GINT64_FORMAT, start);
+ gst_segment_copy_into (&segment, &avi->segment);
+
+ GST_DEBUG_OBJECT (avi, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);
+ gst_avi_demux_push_event (avi, gst_event_new_segment (&segment));
+
+ GST_DEBUG_OBJECT (avi, "next chunk expected at %" G_GINT64_FORMAT,
+ boffset);
/* adjust state for streaming thread accordingly */
if (avi->have_index)
gst_avi_demux_seek_streams_index (avi, offset, FALSE);
+#if 0
else
gst_avi_demux_seek_streams (avi, offset, FALSE);
+#endif
/* set up streaming thread */
- g_assert (offset >= start);
- avi->offset = start;
- avi->todrop = offset - start;
+ g_assert (offset >= boffset);
+ avi->offset = boffset;
+ avi->todrop = offset - boffset;
exit:
gst_event_unref (event);
/* fall through to default case so that the event gets passed downstream */
}
default:
- res = gst_pad_event_default (pad, event);
+ res = gst_pad_event_default (pad, parent, event);
break;
}
- gst_object_unref (avi);
-
return res;
}
static gboolean
-gst_avi_demux_handle_src_event (GstPad * pad, GstEvent * event)
+gst_avi_demux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
gboolean res = TRUE;
- GstAviDemux *avi = GST_AVI_DEMUX (gst_pad_get_parent (pad));
+ GstAviDemux *avi = GST_AVI_DEMUX (parent);
GST_DEBUG_OBJECT (avi,
"have event type %s: %p on src pad", GST_EVENT_TYPE_NAME (event), event);
gst_event_unref (event);
break;
default:
- res = gst_pad_event_default (pad, event);
+ res = gst_pad_event_default (pad, parent, event);
break;
}
- gst_object_unref (avi);
-
return res;
}
if (gst_adapter_available (avi->adapter) < 8)
return FALSE;
- data = gst_adapter_peek (avi->adapter, 8);
+ data = gst_adapter_map (avi->adapter, 8);
*tag = GST_READ_UINT32_LE (data);
*size = GST_READ_UINT32_LE (data + 4);
+ gst_adapter_unmap (avi->adapter);
return TRUE;
}
GstBuffer * buf, gst_riff_avih ** _avih)
{
gst_riff_avih *avih;
+ gsize size;
if (buf == NULL)
goto no_buffer;
- if (GST_BUFFER_SIZE (buf) < sizeof (gst_riff_avih))
+ size = gst_buffer_get_size (buf);
+ if (size < sizeof (gst_riff_avih))
goto avih_too_small;
- avih = g_memdup (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ avih = g_malloc (size);
+ gst_buffer_extract (buf, 0, avih, size);
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
avih->us_frame = GUINT32_FROM_LE (avih->us_frame);
avih_too_small:
{
GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
- ("Too small avih (%d available, %d needed)",
- GST_BUFFER_SIZE (buf), (int) sizeof (gst_riff_avih)));
+ ("Too small avih (%" G_GSIZE_FORMAT " available, %d needed)",
+ size, (int) sizeof (gst_riff_avih)));
gst_buffer_unref (buf);
return FALSE;
}
gst_avi_demux_parse_superindex (GstAviDemux * avi,
GstBuffer * buf, guint64 ** _indexes)
{
+ GstMapInfo map;
guint8 *data;
guint16 bpe = 16;
guint32 num, i;
guint64 *indexes;
- guint size;
+ gsize size;
*_indexes = NULL;
- size = buf ? GST_BUFFER_SIZE (buf) : 0;
+ if (buf) {
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+ } else {
+ data = NULL;
+ size = 0;
+ }
+
if (size < 24)
goto too_small;
- data = GST_BUFFER_DATA (buf);
-
/* check type of index. The opendml2 specs state that
* there should be 4 dwords per array entry. Type can be
* either frame or field (and we don't care). */
indexes[i] = GST_BUFFER_OFFSET_NONE;
*_indexes = indexes;
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return TRUE;
too_small:
{
GST_ERROR_OBJECT (avi,
- "Not enough data to parse superindex (%d available, 24 needed)", size);
- if (buf)
+ "Not enough data to parse superindex (%" G_GSIZE_FORMAT
+ " available, 24 needed)", size);
+ if (buf) {
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
+ }
return FALSE;
}
invalid_params:
{
GST_ERROR_OBJECT (avi, "invalid index parameters (num = %d, bpe = %d)",
num, bpe);
- if (buf)
- gst_buffer_unref (buf);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
return FALSE;
}
}
gst_avi_demux_parse_subindex (GstAviDemux * avi, GstAviStream * stream,
GstBuffer * buf)
{
+ GstMapInfo map;
guint8 *data;
guint16 bpe;
guint32 num, i;
guint64 baseoff;
- guint size;
- if (!buf)
+ if (buf == NULL)
return TRUE;
- size = GST_BUFFER_SIZE (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
/* check size */
- if (size < 24)
+ if (map.size < 24)
goto too_small;
- data = GST_BUFFER_DATA (buf);
-
/* We don't support index-data yet */
if (data[3] & 0x80)
goto not_implemented;
for (i = 0; i < num; i++) {
GstAviIndexEntry entry;
- if (size < 24 + bpe * (i + 1))
+ if (map.size < 24 + bpe * (i + 1))
break;
/* fill in offset and size. offset contains the keyframe flag in the
if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
goto out_of_mem;
}
+done:
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return TRUE;
too_small:
{
GST_ERROR_OBJECT (avi,
- "Not enough data to parse subindex (%d available, 24 needed)", size);
- gst_buffer_unref (buf);
- return TRUE; /* continue */
+ "Not enough data to parse subindex (%" G_GSIZE_FORMAT
+ " available, 24 needed)", map.size);
+ goto done; /* continue */
}
not_implemented:
{
GST_ELEMENT_ERROR (avi, STREAM, NOT_IMPLEMENTED, (NULL),
("Subindex-is-data is not implemented"));
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return FALSE;
}
empty_index:
{
GST_DEBUG_OBJECT (avi, "the index is empty");
- gst_buffer_unref (buf);
- return TRUE;
+ goto done; /* continue */
}
out_of_mem:
{
("Cannot allocate memory for %u*%u=%u bytes",
(guint) sizeof (GstAviIndexEntry), num,
(guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return FALSE;
}
{
gst_riff_vprp *vprp;
gint k;
+ gsize size;
g_return_val_if_fail (buf != NULL, FALSE);
g_return_val_if_fail (_vprp != NULL, FALSE);
- if (GST_BUFFER_SIZE (buf) < G_STRUCT_OFFSET (gst_riff_vprp, field_info))
+ size = gst_buffer_get_size (buf);
+
+ if (size < G_STRUCT_OFFSET (gst_riff_vprp, field_info))
goto too_small;
- vprp = g_memdup (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ vprp = g_malloc (size);
+ gst_buffer_extract (buf, 0, vprp, size);
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
vprp->format_token = GUINT32_FROM_LE (vprp->format_token);
/* size checking */
/* calculate fields based on size */
- k = (GST_BUFFER_SIZE (buf) - G_STRUCT_OFFSET (gst_riff_vprp, field_info)) /
- vprp->fields;
+ k = (size - G_STRUCT_OFFSET (gst_riff_vprp, field_info)) / vprp->fields;
if (vprp->fields > k) {
GST_WARNING_OBJECT (element,
"vprp header indicated %d fields, only %d available", vprp->fields, k);
too_small:
{
GST_ERROR_OBJECT (element,
- "Too small vprp (%d available, at least %d needed)",
- GST_BUFFER_SIZE (buf),
- (int) G_STRUCT_OFFSET (gst_riff_vprp, field_info));
+ "Too small vprp (%" G_GSIZE_FORMAT " available, at least %d needed)",
+ size, (int) G_STRUCT_OFFSET (gst_riff_vprp, field_info));
gst_buffer_unref (buf);
return FALSE;
}
GstAviStream *stream = &avi->stream[i];
if (force || stream->idx_n != 0) {
- GST_LOG_OBJECT (avi, "Added pad %s with caps %" GST_PTR_FORMAT,
- GST_PAD_NAME (stream->pad), GST_PAD_CAPS (stream->pad));
+ GST_LOG_OBJECT (avi, "Adding pad %s" GST_PTR_FORMAT,
+ GST_PAD_NAME (stream->pad));
gst_element_add_pad ((GstElement *) avi, stream->pad);
+#if 0
if (avi->element_index)
gst_index_get_writer_id (avi->element_index,
GST_OBJECT_CAST (stream->pad), &stream->index_id);
+#endif
stream->exposed = TRUE;
if (avi->main_stream == -1)
static inline void
gst_avi_demux_roundup_list (GstAviDemux * avi, GstBuffer ** buf)
{
- gint size = GST_BUFFER_SIZE (*buf);
+ gsize size;
+
+ size = gst_buffer_get_size (*buf);
if (G_UNLIKELY (size & 1)) {
GstBuffer *obuf;
+ GstMapInfo map;
- GST_DEBUG_OBJECT (avi, "rounding up dubious list size %d", size);
+ GST_DEBUG_OBJECT (avi, "rounding up dubious list size %" G_GSIZE_FORMAT,
+ size);
obuf = gst_buffer_new_and_alloc (size + 1);
- memcpy (GST_BUFFER_DATA (obuf), GST_BUFFER_DATA (*buf), size);
+
+ gst_buffer_map (obuf, &map, GST_MAP_WRITE);
+ gst_buffer_extract (*buf, 0, map.data, size);
/* assume 0 padding, at least makes outcome deterministic */
- (GST_BUFFER_DATA (obuf))[size] = 0;
+ map.data[size] = 0;
+ gst_buffer_unmap (obuf, &map);
gst_buffer_replace (buf, obuf);
}
}
case GST_RIFF_TAG_strn:
g_free (stream->name);
if (sub != NULL) {
- stream->name =
- g_strndup ((gchar *) GST_BUFFER_DATA (sub),
- (gsize) GST_BUFFER_SIZE (sub));
+ GstMapInfo map;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+ stream->name = g_strndup ((gchar *) map.data, map.size);
+ gst_buffer_unmap (sub, &map);
gst_buffer_unref (sub);
sub = NULL;
} else {
fourcc = (stream->strf.vids->compression) ?
stream->strf.vids->compression : stream->strh->fcc_handler;
- padname = g_strdup_printf ("video_%02d", avi->num_v_streams);
- templ = gst_element_class_get_pad_template (klass, "video_%02d");
+ padname = g_strdup_printf ("video_%u", avi->num_v_streams);
+ templ = gst_element_class_get_pad_template (klass, "video_%u");
caps = gst_riff_create_video_caps (fourcc, stream->strh,
stream->strf.vids, stream->extradata, stream->initdata, &codec_name);
if (!caps) {
caps = gst_caps_new_simple ("video/x-avi-unknown", "fourcc",
- GST_TYPE_FOURCC, fourcc, NULL);
+ G_TYPE_INT, fourcc, NULL);
} else if (got_vprp && vprp) {
guint32 aspect_n, aspect_d;
gint n, d;
break;
}
case GST_RIFF_FCC_auds:{
- padname = g_strdup_printf ("audio_%02d", avi->num_a_streams);
- templ = gst_element_class_get_pad_template (klass, "audio_%02d");
+ /* FIXME: Do something with the channel reorder map */
+ padname = g_strdup_printf ("audio_%u", avi->num_a_streams);
+ templ = gst_element_class_get_pad_template (klass, "audio_%u");
caps = gst_riff_create_audio_caps (stream->strf.auds->format,
stream->strh, stream->strf.auds, stream->extradata,
- stream->initdata, &codec_name);
+ stream->initdata, &codec_name, NULL);
if (!caps) {
caps = gst_caps_new_simple ("audio/x-avi-unknown", "codec_id",
G_TYPE_INT, stream->strf.auds->format, NULL);
case GST_RIFF_FCC_iavs:{
guint32 fourcc = stream->strh->fcc_handler;
- padname = g_strdup_printf ("video_%02d", avi->num_v_streams);
- templ = gst_element_class_get_pad_template (klass, "video_%02d");
+ padname = g_strdup_printf ("video_%u", avi->num_v_streams);
+ templ = gst_element_class_get_pad_template (klass, "video_%u");
caps = gst_riff_create_iavs_caps (fourcc, stream->strh,
stream->strf.iavs, stream->extradata, stream->initdata, &codec_name);
if (!caps) {
caps = gst_caps_new_simple ("video/x-avi-unknown", "fourcc",
- GST_TYPE_FOURCC, fourcc, NULL);
+ G_TYPE_INT, fourcc, NULL);
}
tag_name = GST_TAG_VIDEO_CODEC;
avi->num_v_streams++;
break;
}
case GST_RIFF_FCC_txts:{
- padname = g_strdup_printf ("subtitle_%02d", avi->num_t_streams);
- templ = gst_element_class_get_pad_template (klass, "subtitle_%02d");
- caps = gst_caps_new_simple ("application/x-subtitle-avi", NULL);
+ padname = g_strdup_printf ("subtitle_%u", avi->num_t_streams);
+ templ = gst_element_class_get_pad_template (klass, "subtitle_%u");
+ caps = gst_caps_new_empty_simple ("application/x-subtitle-avi");
tag_name = NULL;
avi->num_t_streams++;
break;
#endif
gst_pad_set_event_function (pad,
GST_DEBUG_FUNCPTR (gst_avi_demux_handle_src_event));
- gst_pad_set_query_type_function (pad,
- GST_DEBUG_FUNCPTR (gst_avi_demux_get_src_query_types));
gst_pad_set_query_function (pad,
GST_DEBUG_FUNCPTR (gst_avi_demux_handle_src_query));
#if 0
gst_pad_set_element_private (pad, stream);
avi->num_streams++;
- gst_pad_set_caps (pad, caps);
gst_pad_set_active (pad, TRUE);
+ gst_pad_push_event (pad, gst_event_new_caps (caps));
gst_caps_unref (caps);
/* make tags */
if (codec_name) {
if (!stream->taglist)
- stream->taglist = gst_tag_list_new ();
+ stream->taglist = gst_tag_list_new_empty ();
avi->got_tags = TRUE;
switch (tag) {
case GST_RIFF_TAG_dmlh:{
gst_riff_dmlh dmlh, *_dmlh;
- guint size;
+ GstMapInfo map;
/* sub == NULL is possible and means an empty buffer */
- size = sub ? GST_BUFFER_SIZE (sub) : 0;
+ if (sub == NULL)
+ goto next;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
/* check size */
- if (size < sizeof (gst_riff_dmlh)) {
+ if (map.size < sizeof (gst_riff_dmlh)) {
GST_ERROR_OBJECT (avi,
- "DMLH entry is too small (%d bytes, %d needed)",
- size, (int) sizeof (gst_riff_dmlh));
+ "DMLH entry is too small (%" G_GSIZE_FORMAT " bytes, %d needed)",
+ map.size, (int) sizeof (gst_riff_dmlh));
+ gst_buffer_unmap (sub, &map);
goto next;
}
- _dmlh = (gst_riff_dmlh *) GST_BUFFER_DATA (sub);
+ _dmlh = (gst_riff_dmlh *) map.data;
dmlh.totalframes = GST_READ_UINT32_LE (&_dmlh->totalframes);
+ gst_buffer_unmap (sub, &map);
GST_INFO_OBJECT (avi, "dmlh tag found: totalframes: %u",
dmlh.totalframes);
static gboolean
gst_avi_demux_parse_index (GstAviDemux * avi, GstBuffer * buf)
{
- guint8 *data;
- guint size;
+ GstMapInfo map;
guint i, num, n;
gst_riff_index_entry *index;
GstClockTime stamp;
if (!buf)
return FALSE;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
stamp = gst_util_get_timestamp ();
/* see how many items in the index */
- num = size / sizeof (gst_riff_index_entry);
+ num = map.size / sizeof (gst_riff_index_entry);
if (num == 0)
goto empty_list;
GST_INFO_OBJECT (avi, "Parsing index, nr_entries = %6d", num);
- index = (gst_riff_index_entry *) data;
+ index = (gst_riff_index_entry *) map.data;
/* figure out if the index is 0 based or relative to the MOVI start */
entry.offset = GST_READ_UINT32_LE (&index[0].offset);
n++;
}
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
/* get stream stats now */
empty_list:
{
GST_DEBUG_OBJECT (avi, "empty index");
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return FALSE;
}
("Cannot allocate memory for %u*%u=%u bytes",
(guint) sizeof (GstAviIndexEntry), num,
(guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return FALSE;
}
GstBuffer *buf;
guint32 tag;
guint32 size;
+ GstMapInfo map;
GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset);
res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
if (res != GST_FLOW_OK)
goto pull_failed;
- else if (GST_BUFFER_SIZE (buf) < 8)
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size < 8)
goto too_small;
/* check tag first before blindy trying to read 'size' bytes */
- tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
- size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 4);
+ tag = GST_READ_UINT32_LE (map.data);
+ size = GST_READ_UINT32_LE (map.data + 4);
if (tag == GST_RIFF_TAG_LIST) {
/* this is the movi tag */
GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT,
(8 + GST_ROUND_UP_2 (size)));
offset += 8 + GST_ROUND_UP_2 (size);
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
+
res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
if (res != GST_FLOW_OK)
goto pull_failed;
- else if (GST_BUFFER_SIZE (buf) < 8)
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size < 8)
goto too_small;
- tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
- size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 4);
+
+ tag = GST_READ_UINT32_LE (map.data);
+ size = GST_READ_UINT32_LE (map.data + 4);
}
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
if (tag != GST_RIFF_TAG_idx1)
goto no_index;
if (!size)
goto zero_index;
- gst_buffer_unref (buf);
-
GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset);
/* read chunk, advance offset */
avi->sinkpad, &offset, &tag, &buf) != GST_FLOW_OK)
return;
- GST_DEBUG ("will parse index chunk size %u for tag %"
- GST_FOURCC_FORMAT, GST_BUFFER_SIZE (buf), GST_FOURCC_ARGS (tag));
+ GST_DEBUG ("will parse index chunk size %" G_GSIZE_FORMAT " for tag %"
+ GST_FOURCC_FORMAT, gst_buffer_get_size (buf), GST_FOURCC_ARGS (tag));
gst_avi_demux_parse_index (avi, buf);
too_small:
{
GST_DEBUG_OBJECT (avi, "Buffer is too small");
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return;
}
GST_WARNING_OBJECT (avi,
"No index data (idx1) after movi chunk, but %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (tag));
- gst_buffer_unref (buf);
return;
}
zero_index:
{
GST_WARNING_OBJECT (avi, "Empty index data (idx1) after movi chunk");
- gst_buffer_unref (buf);
return;
}
}
/* advance offset */
offset += 8 + GST_ROUND_UP_2 (size);
- GST_DEBUG ("will parse index chunk size %u for tag %"
- GST_FOURCC_FORMAT, GST_BUFFER_SIZE (buf), GST_FOURCC_ARGS (tag));
+ GST_DEBUG ("will parse index chunk size %" G_GSIZE_FORMAT " for tag %"
+ GST_FOURCC_FORMAT, gst_buffer_get_size (buf), GST_FOURCC_ARGS (tag));
avi->offset = avi->first_movi_offset;
gst_avi_demux_parse_index (avi, buf);
{
GstFlowReturn res = GST_FLOW_OK;
GstBuffer *buf = NULL;
- guint bufsize;
- guint8 *bufdata;
+ GstMapInfo map;
res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
if (res != GST_FLOW_OK)
goto pull_failed;
- bufsize = GST_BUFFER_SIZE (buf);
- if (bufsize != 8)
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size != 8)
goto wrong_size;
- bufdata = GST_BUFFER_DATA (buf);
-
- *tag = GST_READ_UINT32_LE (bufdata);
- *size = GST_READ_UINT32_LE (bufdata + 4);
+ *tag = GST_READ_UINT32_LE (map.data);
+ *size = GST_READ_UINT32_LE (map.data + 4);
GST_LOG_OBJECT (avi, "Tag[%" GST_FOURCC_FORMAT "] (size:%d) %"
G_GINT64_FORMAT " -- %" G_GINT64_FORMAT, GST_FOURCC_ARGS (*tag),
*size, offset + 8, offset + 8 + (gint64) * size);
done:
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return res;
}
wrong_size:
{
- GST_DEBUG_OBJECT (avi, "got %d bytes which is <> 8 bytes", bufsize);
+ GST_DEBUG_OBJECT (avi, "got %" G_GSIZE_FORMAT " bytes which is <> 8 bytes",
+ map.size);
res = GST_FLOW_ERROR;
goto done;
}
{
GstFlowReturn res;
GstAviStream *stream;
- GstFormat format;
guint64 pos = 0;
guint64 length;
gint64 tmplength;
GST_DEBUG_OBJECT (avi, "Creating index");
/* get the size of the file */
- format = GST_FORMAT_BYTES;
- if (!gst_pad_query_peer_duration (avi->sinkpad, &format, &tmplength))
+ if (!gst_pad_peer_query_duration (avi->sinkpad, GST_FORMAT_BYTES, &tmplength))
return FALSE;
length = tmplength;
GST_INFO ("Setting total duration to: %" GST_TIME_FORMAT,
GST_TIME_ARGS (total));
- gst_segment_set_duration (&avi->segment, GST_FORMAT_TIME, total);
+ avi->segment.duration = total;
}
/* returns FALSE if there are no pads to deliver event to,
/* try harder to query upstream size if we didn't get it the first time */
if (seekable && stop == -1) {
- GstFormat fmt = GST_FORMAT_BYTES;
-
GST_DEBUG_OBJECT (avi, "doing duration query to fix up unset stop");
- gst_pad_query_peer_duration (avi->sinkpad, &fmt, &stop);
+ gst_pad_peer_query_duration (avi->sinkpad, GST_FORMAT_BYTES, &stop);
}
/* if upstream doesn't know the size, it's likely that it's not seekable in
const guint8 *data;
GstBuffer *buf = NULL, *sub = NULL;
guint offset = 4;
- gint64 stop;
gint i;
GstTagList *tags = NULL;
+ guint8 fourcc[4];
GST_DEBUG ("Reading and parsing avi headers: %d", avi->header_state);
GST_DEBUG ("Reading %d bytes", size);
buf = gst_adapter_take_buffer (avi->adapter, size);
- if (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf)) != GST_RIFF_LIST_hdrl)
+ gst_buffer_extract (buf, 0, fourcc, 4);
+
+ if (GST_READ_UINT32_LE (fourcc) != GST_RIFF_LIST_hdrl)
goto header_no_hdrl;
/* mind padding */
switch (tag) {
case GST_RIFF_TAG_LIST:
- if (GST_BUFFER_SIZE (sub) < 4)
+ if (gst_buffer_get_size (sub) < 4)
goto next;
- switch (GST_READ_UINT32_LE (GST_BUFFER_DATA (sub))) {
+ gst_buffer_extract (sub, 0, fourcc, 4);
+
+ switch (GST_READ_UINT32_LE (fourcc)) {
case GST_RIFF_LIST_strl:
if (!(gst_avi_demux_parse_stream (avi, sub))) {
sub = NULL;
default:
GST_WARNING_OBJECT (avi,
"Unknown list %" GST_FOURCC_FORMAT " in AVI header",
- GST_FOURCC_ARGS (GST_READ_UINT32_LE (GST_BUFFER_DATA
- (sub))));
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (fourcc)));
/* fall-through */
case GST_RIFF_TAG_JUNQ:
case GST_RIFF_TAG_JUNK:
goto next;
+ break;
}
- break;
case GST_RIFF_IDIT:
gst_avi_demux_parse_idit (avi, sub);
goto next;
if (gst_adapter_available (avi->adapter) < 12)
return GST_FLOW_OK;
- data = gst_adapter_peek (avi->adapter, 12);
+ data = gst_adapter_map (avi->adapter, 12);
tag = GST_READ_UINT32_LE (data);
size = GST_READ_UINT32_LE (data + 4);
ltag = GST_READ_UINT32_LE (data + 8);
+ gst_adapter_unmap (avi->adapter);
if (tag == GST_RIFF_TAG_LIST) {
switch (ltag) {
avi->stream[i].current_entry = 0;
/* create initial NEWSEGMENT event */
- if ((stop = avi->segment.stop) == GST_CLOCK_TIME_NONE)
- stop = avi->segment.duration;
-
- GST_DEBUG_OBJECT (avi, "segment stop %" G_GINT64_FORMAT, stop);
-
if (avi->seg_event)
gst_event_unref (avi->seg_event);
- avi->seg_event = gst_event_new_new_segment_full
- (FALSE, avi->segment.rate, avi->segment.applied_rate, GST_FORMAT_TIME,
- avi->segment.start, stop, avi->segment.time);
+ avi->seg_event = gst_event_new_segment (&avi->segment);
gst_avi_demux_check_seekability (avi);
dt = gst_date_time_new_local_time (y, m, d, h, min, s);
if (avi->globaltags == NULL)
- avi->globaltags = gst_tag_list_new ();
+ avi->globaltags = gst_tag_list_new_empty ();
gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_REPLACE, GST_TAG_DATE, date,
NULL);
static void
gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf)
{
- gchar *data = (gchar *) GST_BUFFER_DATA (buf);
- guint size = GST_BUFFER_SIZE (buf);
+ GstMapInfo map;
+ gchar *ptr;
+ gsize left;
gchar *safedata = NULL;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
/*
* According to:
* http://www.eden-foundation.org/products/code/film_date_stamp/index.html
*/
/* skip eventual initial whitespace */
- while (size > 0 && g_ascii_isspace (data[0])) {
- data++;
- size--;
+ ptr = (gchar *) map.data;
+ left = map.size;
+
+ while (left > 0 && g_ascii_isspace (ptr[0])) {
+ ptr++;
+ left--;
}
- if (size == 0) {
+ if (left == 0) {
goto non_parsable;
}
/* make a safe copy to add a \0 to the end of the string */
- safedata = g_strndup (data, size);
+ safedata = g_strndup (ptr, left);
/* test if the first char is a alpha or a number */
- if (g_ascii_isdigit (data[0])) {
+ if (g_ascii_isdigit (ptr[0])) {
gst_avi_demux_parse_idit_nums_only (avi, safedata);
g_free (safedata);
return;
- } else if (g_ascii_isalpha (data[0])) {
+ } else if (g_ascii_isalpha (ptr[0])) {
gst_avi_demux_parse_idit_text (avi, safedata);
g_free (safedata);
return;
non_parsable:
GST_WARNING_OBJECT (avi, "IDIT tag has no parsable info");
+ gst_buffer_unmap (buf, &map);
}
/*
GstBuffer *buf, *sub = NULL;
guint32 tag;
guint offset = 4;
- gint64 stop;
GstElement *element = GST_ELEMENT_CAST (avi);
GstClockTime stamp;
GstTagList *tags = NULL;
+ guint8 fourcc[4];
stamp = gst_util_get_timestamp ();
goto pull_range_failed;
else if (tag != GST_RIFF_TAG_LIST)
goto no_list;
- else if (GST_BUFFER_SIZE (buf) < 4)
+ else if (gst_buffer_get_size (buf) < 4)
goto no_header;
GST_DEBUG_OBJECT (avi, "parsing headers");
/* Find the 'hdrl' LIST tag */
- while (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf)) != GST_RIFF_LIST_hdrl) {
+ gst_buffer_extract (buf, 0, fourcc, 4);
+ while (GST_READ_UINT32_LE (fourcc) != GST_RIFF_LIST_hdrl) {
GST_LOG_OBJECT (avi, "buffer contains %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf))));
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (fourcc)));
/* Eat up */
gst_buffer_unref (buf);
goto pull_range_failed;
else if (tag != GST_RIFF_TAG_LIST)
goto no_list;
- else if (GST_BUFFER_SIZE (buf) < 4)
+ else if (gst_buffer_get_size (buf) < 4)
goto no_header;
+ gst_buffer_extract (buf, 0, fourcc, 4);
}
GST_DEBUG_OBJECT (avi, "hdrl LIST tag found");
/* now, read the elements from the header until the end */
while (gst_riff_parse_chunk (element, buf, &offset, &tag, &sub)) {
+ GstMapInfo map;
+
/* sub can be NULL on empty tags */
if (!sub)
continue;
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+
switch (tag) {
case GST_RIFF_TAG_LIST:
- {
- guint8 *data;
- guint32 fourcc;
-
- if (GST_BUFFER_SIZE (sub) < 4)
+ if (map.size < 4)
goto next;
- data = GST_BUFFER_DATA (sub);
- fourcc = GST_READ_UINT32_LE (data);
-
- switch (fourcc) {
+ switch (GST_READ_UINT32_LE (map.data)) {
case GST_RIFF_LIST_strl:
if (!(gst_avi_demux_parse_stream (avi, sub))) {
GST_ELEMENT_WARNING (avi, STREAM, DEMUX, (NULL),
sub = NULL;
break;
case GST_RIFF_LIST_INFO:
- GST_BUFFER_DATA (sub) = data + 4;
- GST_BUFFER_SIZE (sub) -= 4;
+ gst_buffer_resize (sub, 4, -1);
gst_riff_parse_info (element, sub, &tags);
if (tags) {
if (avi->globaltags) {
default:
GST_WARNING_OBJECT (avi,
"Unknown list %" GST_FOURCC_FORMAT " in AVI header",
- GST_FOURCC_ARGS (fourcc));
- GST_MEMDUMP_OBJECT (avi, "Unknown list", GST_BUFFER_DATA (sub),
- GST_BUFFER_SIZE (sub));
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (map.data)));
+ GST_MEMDUMP_OBJECT (avi, "Unknown list", map.data, map.size);
/* fall-through */
case GST_RIFF_TAG_JUNQ:
case GST_RIFF_TAG_JUNK:
goto next;
}
break;
- }
case GST_RIFF_IDIT:
gst_avi_demux_parse_idit (avi, sub);
goto next;
GST_WARNING_OBJECT (avi,
"Unknown tag %" GST_FOURCC_FORMAT " in AVI header at off %d",
GST_FOURCC_ARGS (tag), offset);
- GST_MEMDUMP_OBJECT (avi, "Unknown tag", GST_BUFFER_DATA (sub),
- GST_BUFFER_SIZE (sub));
+ GST_MEMDUMP_OBJECT (avi, "Unknown tag", map.data, map.size);
/* fall-through */
case GST_RIFF_TAG_JUNQ:
case GST_RIFF_TAG_JUNK:
next:
- if (sub)
+ if (sub) {
+ gst_buffer_unmap (sub, &map);
gst_buffer_unref (sub);
+ }
sub = NULL;
break;
}
/* Now, find the data (i.e. skip all junk between header and data) */
do {
+ GstMapInfo map;
guint size;
- guint8 *data;
guint32 tag, ltag;
res = gst_pad_pull_range (avi->sinkpad, avi->offset, 12, &buf);
if (res != GST_FLOW_OK) {
GST_DEBUG_OBJECT (avi, "pull_range failure while looking for tags");
goto pull_range_failed;
- } else if (GST_BUFFER_SIZE (buf) < 12) {
- GST_DEBUG_OBJECT (avi, "got %d bytes which is less than 12 bytes",
- GST_BUFFER_SIZE (buf));
+ } else if (gst_buffer_get_size (buf) < 12) {
+ GST_DEBUG_OBJECT (avi,
+ "got %" G_GSIZE_FORMAT " bytes which is less than 12 bytes",
+ gst_buffer_get_size (buf));
gst_buffer_unref (buf);
return GST_FLOW_ERROR;
}
- data = GST_BUFFER_DATA (buf);
-
- tag = GST_READ_UINT32_LE (data);
- size = GST_READ_UINT32_LE (data + 4);
- ltag = GST_READ_UINT32_LE (data + 8);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ tag = GST_READ_UINT32_LE (map.data);
+ size = GST_READ_UINT32_LE (map.data + 4);
+ ltag = GST_READ_UINT32_LE (map.data + 8);
GST_DEBUG ("tag %" GST_FOURCC_FORMAT ", size %u",
GST_FOURCC_ARGS (tag), size);
- GST_MEMDUMP ("Tag content", data, GST_BUFFER_SIZE (buf));
+ GST_MEMDUMP ("Tag content", map.data, map.size);
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
switch (tag) {
GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
goto pull_range_failed;
}
- GST_DEBUG ("got size %u", GST_BUFFER_SIZE (buf));
+ GST_DEBUG ("got size %" G_GSIZE_FORMAT, gst_buffer_get_size (buf));
if (size < 4) {
GST_DEBUG ("skipping INFO LIST prefix");
avi->offset += (4 - GST_ROUND_UP_2 (size));
continue;
}
- sub = gst_buffer_create_sub (buf, 4, GST_BUFFER_SIZE (buf) - 4);
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, 4, -1);
gst_riff_parse_info (element, sub, &tags);
if (tags) {
if (avi->globaltags) {
GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
goto pull_range_failed;
}
- GST_MEMDUMP ("Junk", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ GST_MEMDUMP ("Junk", map.data, map.size);
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
}
avi->offset += 8 + GST_ROUND_UP_2 (size);
gst_avi_demux_expose_streams (avi, FALSE);
- /* create initial NEWSEGMENT event */
- if ((stop = avi->segment.stop) == GST_CLOCK_TIME_NONE)
- stop = avi->segment.duration;
-
- GST_DEBUG_OBJECT (avi, "segment stop %" G_GINT64_FORMAT, stop);
-
/* do initial seek to the default segment values */
gst_avi_demux_do_seek (avi, &avi->segment);
- /* prepare initial segment */
+ /* create initial NEWSEGMENT event */
if (avi->seg_event)
gst_event_unref (avi->seg_event);
- avi->seg_event = gst_event_new_new_segment_full
- (FALSE, avi->segment.rate, avi->segment.applied_rate, GST_FORMAT_TIME,
- avi->segment.start, stop, avi->segment.time);
+ avi->seg_event = gst_event_new_segment (&avi->segment);
stamp = gst_util_get_timestamp () - stamp;
GST_DEBUG_OBJECT (avi, "pulling header took %" GST_TIME_FORMAT,
guint next_key;
/* Because we don't know the frame order we need to push from the prev keyframe
* to the next keyframe. If there is a smart decoder downstream he will notice
- * that there are too many encoded frames send and return UNEXPECTED when there
+ * that there are too many encoded frames send and return EOS when there
* are enough decoded frames to fill the segment. */
next_key = gst_avi_demux_index_next (avi, stream, index, TRUE);
guint i, index;
GstAviStream *stream;
- seek_time = segment->last_stop;
- keyframe = !!(segment->flags & GST_SEEK_FLAG_KEY_UNIT);
+ seek_time = segment->position;
+ keyframe = ! !(segment->flags & GST_SEEK_FLAG_KEY_UNIT);
GST_DEBUG_OBJECT (avi, "seek to: %" GST_TIME_FORMAT
" keyframe seeking:%d", GST_TIME_ARGS (seek_time), keyframe);
GST_TIME_ARGS (seek_time));
}
- /* the seek time is also the last_stop and stream time when going
+ /* the seek time is also the position and stream time when going
* forwards */
- segment->last_stop = seek_time;
+ segment->position = seek_time;
if (segment->rate > 0.0)
segment->time = seek_time;
/* we have to have a format as the segment format. Try to convert
* if not. */
if (format != GST_FORMAT_TIME) {
- GstFormat fmt = GST_FORMAT_TIME;
gboolean res = TRUE;
if (cur_type != GST_SEEK_TYPE_NONE)
- res = gst_pad_query_convert (pad, format, cur, &fmt, &cur);
+ res = gst_pad_query_convert (pad, format, cur, GST_FORMAT_TIME, &cur);
if (res && stop_type != GST_SEEK_TYPE_NONE)
- res = gst_pad_query_convert (pad, format, stop, &fmt, &stop);
+ res = gst_pad_query_convert (pad, format, stop, GST_FORMAT_TIME, &stop);
if (!res)
goto no_format;
- format = fmt;
+ format = GST_FORMAT_TIME;
}
GST_DEBUG_OBJECT (avi,
"seek requested: rate %g cur %" GST_TIME_FORMAT " stop %"
if (event) {
GST_DEBUG_OBJECT (avi, "configuring seek");
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
}
- /* do the seek, seeksegment.last_stop contains the new position, this
+ /* do the seek, seeksegment.position contains the new position, this
* actually never fails. */
gst_avi_demux_do_seek (avi, &seeksegment);
- gst_event_replace (&avi->close_seg_event, NULL);
if (flush) {
- GstEvent *fevent = gst_event_new_flush_stop ();
+ GstEvent *fevent = gst_event_new_flush_stop (TRUE);
GST_DEBUG_OBJECT (avi, "sending flush stop");
gst_avi_demux_push_event (avi, gst_event_ref (fevent));
gst_pad_push_event (avi->sinkpad, fevent);
- } else if (avi->segment_running) {
- /* we are running the current segment and doing a non-flushing seek,
- * close the segment first based on the last_stop. */
- GST_DEBUG_OBJECT (avi, "closing running segment %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, avi->segment.start, avi->segment.last_stop);
- avi->close_seg_event = gst_event_new_new_segment_full (TRUE,
- avi->segment.rate, avi->segment.applied_rate, avi->segment.format,
- avi->segment.start, avi->segment.last_stop, avi->segment.time);
}
/* now update the real segment info */
if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT_CAST (avi),
gst_message_new_segment_start (GST_OBJECT_CAST (avi),
- avi->segment.format, avi->segment.last_stop));
+ avi->segment.format, avi->segment.position));
}
- /* prepare for streaming again */
- if ((stop = avi->segment.stop) == GST_CLOCK_TIME_NONE)
- stop = avi->segment.duration;
-
/* queue the segment event for the streaming thread. */
if (avi->seg_event)
gst_event_unref (avi->seg_event);
- if (avi->segment.rate > 0.0) {
- /* forwards goes from last_stop to stop */
- avi->seg_event = gst_event_new_new_segment_full (FALSE,
- avi->segment.rate, avi->segment.applied_rate, avi->segment.format,
- avi->segment.last_stop, stop, avi->segment.time);
- } else {
- /* reverse goes from start to last_stop */
- avi->seg_event = gst_event_new_new_segment_full (FALSE,
- avi->segment.rate, avi->segment.applied_rate, avi->segment.format,
- avi->segment.start, avi->segment.last_stop, avi->segment.time);
- }
+ avi->seg_event = gst_event_new_segment (&avi->segment);
if (!avi->streaming) {
- avi->segment_running = TRUE;
gst_pad_start_task (avi->sinkpad, (GstTaskFunction) gst_avi_demux_loop,
avi->sinkpad);
}
&cur_type, &cur, &stop_type, &stop);
if (format != GST_FORMAT_TIME) {
- GstFormat fmt = GST_FORMAT_TIME;
gboolean res = TRUE;
if (cur_type != GST_SEEK_TYPE_NONE)
- res = gst_pad_query_convert (pad, format, cur, &fmt, &cur);
+ res = gst_pad_query_convert (pad, format, cur, GST_FORMAT_TIME, &cur);
if (res && stop_type != GST_SEEK_TYPE_NONE)
- res = gst_pad_query_convert (pad, format, stop, &fmt, &stop);
+ res = gst_pad_query_convert (pad, format, stop, GST_FORMAT_TIME, &stop);
if (!res) {
GST_DEBUG_OBJECT (avi, "unsupported format given, seek aborted.");
return FALSE;
}
- format = fmt;
+ format = GST_FORMAT_TIME;
}
/* let gst_segment handle any tricky stuff */
GST_DEBUG_OBJECT (avi, "configuring seek");
memcpy (&seeksegment, &avi->segment, sizeof (GstSegment));
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
- keyframe = !!(flags & GST_SEEK_FLAG_KEY_UNIT);
- cur = seeksegment.last_stop;
+ keyframe = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
+ cur = seeksegment.position;
GST_DEBUG_OBJECT (avi,
"Seek requested: ts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
gint y, w, h;
gint bpp, stride;
guint8 *tmp = NULL;
+ GstMapInfo map;
+ GstCaps *caps;
if (stream->strh->type != GST_RIFF_FCC_vids)
return buf;
return buf; /* Ignore non DIB buffers */
}
- s = gst_caps_get_structure (GST_PAD_CAPS (stream->pad), 0);
+ caps = gst_pad_get_current_caps (stream->pad);
+ s = gst_caps_get_structure (caps, 0);
+ gst_caps_unref (caps);
+
if (!gst_structure_get_int (s, "bpp", &bpp)) {
GST_WARNING ("Failed to retrieve depth from caps");
return buf;
stride = GST_ROUND_UP_4 (w * (bpp / 8));
buf = gst_buffer_make_writable (buf);
- if (GST_BUFFER_SIZE (buf) < (stride * h)) {
+
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ if (map.size < (stride * h)) {
GST_WARNING ("Buffer is smaller than reported Width x Height x Depth");
+ gst_buffer_unmap (buf, &map);
return buf;
}
tmp = g_malloc (stride);
for (y = 0; y < h / 2; y++) {
- swap_line (GST_BUFFER_DATA (buf) + stride * y,
- GST_BUFFER_DATA (buf) + stride * (h - 1 - y), tmp, stride);
+ swap_line (map.data + stride * y, map.data + stride * (h - 1 - y), tmp,
+ stride);
}
g_free (tmp);
+ gst_buffer_unmap (buf, &map);
+
return buf;
}
+#if 0
static void
gst_avi_demux_add_assoc (GstAviDemux * avi, GstAviStream * stream,
GstClockTime timestamp, guint64 offset, gboolean keyframe)
NULL);
}
}
+#endif
/*
* Returns the aggregated GstFlowReturn.
stream->last_flow = ret;
/* any other error that is not-linked or eos can be returned right away */
- if (G_LIKELY (ret != GST_FLOW_UNEXPECTED && ret != GST_FLOW_NOT_LINKED))
+ if (G_LIKELY (ret != GST_FLOW_EOS && ret != GST_FLOW_NOT_LINKED))
goto done;
/* only return NOT_LINKED if all other pads returned NOT_LINKED */
ret = ostream->last_flow;
/* no unexpected or unlinked, return */
- if (G_LIKELY (ret != GST_FLOW_UNEXPECTED && ret != GST_FLOW_NOT_LINKED))
+ if (G_LIKELY (ret != GST_FLOW_EOS && ret != GST_FLOW_NOT_LINKED))
goto done;
/* we check to see if we have at least 1 unexpected or all unlinked */
- unexpected |= (ret == GST_FLOW_UNEXPECTED);
+ unexpected |= (ret == GST_FLOW_EOS);
not_linked &= (ret == GST_FLOW_NOT_LINKED);
}
/* when we get here, we all have unlinked or unexpected */
if (not_linked)
ret = GST_FLOW_NOT_LINKED;
else if (unexpected)
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
done:
GST_LOG_OBJECT (avi, "combined %s to return %s",
gst_flow_get_name (stream->last_flow), gst_flow_get_name (ret));
GST_DEBUG_OBJECT (avi, "we are EOS");
/* setting current_timestamp to -1 marks EOS */
stream->current_timestamp = -1;
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
}
stream = &avi->stream[i];
/* ignore streams that finished */
- if (stream->last_flow == GST_FLOW_UNEXPECTED)
+ if (stream->last_flow == GST_FLOW_EOS)
continue;
position = stream->current_timestamp;
goto pull_failed;
/* check for short buffers, this is EOS as well */
- if (GST_BUFFER_SIZE (buf) < size)
+ if (gst_buffer_get_size (buf) < size)
goto short_buffer;
/* invert the picture if needed */
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
stream->discont = FALSE;
}
-
+#if 0
gst_avi_demux_add_assoc (avi, stream, timestamp, offset, keyframe);
-
- gst_buffer_set_caps (buf, GST_PAD_CAPS (stream->pad));
+#endif
/* update current position in the segment */
- gst_segment_set_last_stop (&avi->segment, GST_FORMAT_TIME, timestamp);
+ avi->segment.position = timestamp;
- GST_DEBUG_OBJECT (avi, "Pushing buffer of size %u, ts %"
+ GST_DEBUG_OBJECT (avi, "Pushing buffer of size %" G_GSIZE_FORMAT ", ts %"
GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
", off_end %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (buf), GST_TIME_ARGS (timestamp),
+ gst_buffer_get_size (buf), GST_TIME_ARGS (timestamp),
GST_TIME_ARGS (duration), out_offset, out_offset_end);
ret = gst_pad_push (stream->pad, buf);
processed = TRUE;
if (avi->segment.rate < 0) {
- if (timestamp > avi->segment.stop && ret == GST_FLOW_UNEXPECTED) {
- /* In reverse playback we can get a GST_FLOW_UNEXPECTED when
+ if (timestamp > avi->segment.stop && ret == GST_FLOW_EOS) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
* we are at the end of the segment, so we just need to jump
* back to the previous section. */
GST_DEBUG_OBJECT (avi, "downstream has reached end of segment");
eos:
{
GST_DEBUG_OBJECT (avi, "No samples left for any streams - EOS");
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto beach;
}
eos_stop:
GST_LOG_OBJECT (avi, "Found keyframe after segment,"
" setting EOS (%" GST_TIME_FORMAT " > %" GST_TIME_FORMAT ")",
GST_TIME_ARGS (timestamp), GST_TIME_ARGS (avi->segment.stop));
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
/* move to next stream */
goto next;
}
short_buffer:
{
GST_WARNING_OBJECT (avi, "Short read at offset %" G_GUINT64_FORMAT
- ", only got %d/%" G_GUINT64_FORMAT " bytes (truncated file?)", offset,
- GST_BUFFER_SIZE (buf), size);
+ ", only got %" G_GSIZE_FORMAT "/%" G_GUINT64_FORMAT
+ " bytes (truncated file?)", offset, gst_buffer_get_size (buf), size);
gst_buffer_unref (buf);
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto beach;
}
}
guint32 size = 0;
gint stream_nr = 0;
GstFlowReturn res = GST_FLOW_OK;
- GstFormat format = GST_FORMAT_TIME;
if (G_UNLIKELY (avi->have_eos)) {
/* Clean adapter, we're done */
gst_adapter_clear (avi->adapter);
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
if (G_UNLIKELY (avi->todrop)) {
} else {
GST_DEBUG ("No more stream chunks, send EOS");
avi->have_eos = TRUE;
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
if (G_UNLIKELY (!gst_avi_demux_peek_chunk (avi, &tag, &size))) {
GstAviStream *stream;
GstClockTime next_ts = 0;
GstBuffer *buf = NULL;
+#if 0
guint64 offset;
+#endif
gboolean saw_desired_kf = stream_nr != avi->main_stream
|| avi->offset >= avi->seek_kf_offset;
if (size) {
buf = gst_adapter_take_buffer (avi->adapter, GST_ROUND_UP_2 (size));
/* patch the size */
- GST_BUFFER_SIZE (buf) = size;
+ gst_buffer_resize (buf, 0, size);
} else {
buf = NULL;
}
gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
}
+#if 0
offset = avi->offset;
+#endif
avi->offset += 8 + GST_ROUND_UP_2 (size);
stream = &avi->stream[stream_nr];
gst_buffer_unref (buf);
} else {
/* get time of this buffer */
- gst_pad_query_position (stream->pad, &format, (gint64 *) & next_ts);
- if (G_UNLIKELY (format != GST_FORMAT_TIME))
- goto wrong_format;
+ gst_pad_query_position (stream->pad, GST_FORMAT_TIME,
+ (gint64 *) & next_ts);
+#if 0
gst_avi_demux_add_assoc (avi, stream, next_ts, offset, FALSE);
+#endif
/* increment our positions */
stream->current_entry++;
stream->current_total += size;
/* update current position in the segment */
- gst_segment_set_last_stop (&avi->segment, GST_FORMAT_TIME, next_ts);
+ avi->segment.position = next_ts;
if (saw_desired_kf && buf) {
GstClockTime dur_ts = 0;
/* invert the picture if needed */
buf = gst_avi_demux_invert (stream, buf);
- gst_pad_query_position (stream->pad, &format, (gint64 *) & dur_ts);
- if (G_UNLIKELY (format != GST_FORMAT_TIME))
- goto wrong_format;
+ gst_pad_query_position (stream->pad, GST_FORMAT_TIME,
+ (gint64 *) & dur_ts);
GST_BUFFER_TIMESTAMP (buf) = next_ts;
GST_BUFFER_DURATION (buf) = dur_ts - next_ts;
GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
}
- gst_buffer_set_caps (buf, GST_PAD_CAPS (stream->pad));
GST_DEBUG_OBJECT (avi,
"Pushing buffer with time=%" GST_TIME_FORMAT ", duration %"
GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT
}
}
-done:
return res;
-
- /* ERRORS */
-wrong_format:
- {
- GST_DEBUG_OBJECT (avi, "format %s != GST_FORMAT_TIME",
- gst_format_get_name (format));
- res = GST_FLOW_ERROR;
- goto done;
- }
}
/*
if (pad && tags) {
GST_DEBUG_OBJECT (pad, "Tags: %" GST_PTR_FORMAT, tags);
- gst_element_found_tags_for_pad (GST_ELEMENT_CAST (avi), pad, tags);
+ gst_pad_push_event (pad, gst_event_new_tag (tags));
stream->taglist = NULL;
}
}
if (!(tags = avi->globaltags))
- tags = gst_tag_list_new ();
+ tags = gst_tag_list_new_empty ();
gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
GST_TAG_CONTAINER_FORMAT, "AVI", NULL);
GST_DEBUG_OBJECT (avi, "Global tags: %" GST_PTR_FORMAT, tags);
- gst_element_found_tags (GST_ELEMENT_CAST (avi), tags);
+ gst_avi_demux_push_event (avi, gst_event_new_tag (tags));
avi->globaltags = NULL;
avi->got_tags = FALSE;
}
avi->state = GST_AVI_DEMUX_MOVI;
break;
case GST_AVI_DEMUX_MOVI:
- if (G_UNLIKELY (avi->close_seg_event)) {
- gst_avi_demux_push_event (avi, avi->close_seg_event);
- avi->close_seg_event = NULL;
- }
if (G_UNLIKELY (avi->seg_event)) {
gst_avi_demux_push_event (avi, avi->seg_event);
avi->seg_event = NULL;
gboolean push_eos = FALSE;
GST_LOG_OBJECT (avi, "pausing task, reason %s", gst_flow_get_name (res));
- avi->segment_running = FALSE;
gst_pad_pause_task (avi->sinkpad);
-
- if (res == GST_FLOW_UNEXPECTED) {
+ if (res == GST_FLOW_EOS) {
/* handle end-of-stream/segment */
+ /* so align our position with the end of it, if there is one
+ * this ensures a subsequent will arrive at correct base/acc time */
+ if (avi->segment.rate > 0.0 &&
+ GST_CLOCK_TIME_IS_VALID (avi->segment.stop))
+ avi->segment.position = avi->segment.stop;
+ else if (avi->segment.rate < 0.0)
+ avi->segment.position = avi->segment.start;
if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gint64 stop;
} else {
push_eos = TRUE;
}
- } else if (res == GST_FLOW_NOT_LINKED || res < GST_FLOW_UNEXPECTED) {
+ } else if (res == GST_FLOW_NOT_LINKED || res < GST_FLOW_EOS) {
/* for fatal errors we post an error message, wrong-state is
* not fatal because it happens due to flushes and only means
* that we should stop now. */
if (push_eos) {
GST_INFO_OBJECT (avi, "sending eos");
if (!gst_avi_demux_push_event (avi, gst_event_new_eos ()) &&
- (res == GST_FLOW_UNEXPECTED)) {
+ (res == GST_FLOW_EOS)) {
GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
(NULL), ("got eos but no streams (yet)"));
}
static GstFlowReturn
-gst_avi_demux_chain (GstPad * pad, GstBuffer * buf)
+gst_avi_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstFlowReturn res;
- GstAviDemux *avi = GST_AVI_DEMUX (GST_PAD_PARENT (pad));
+ GstAviDemux *avi = GST_AVI_DEMUX (parent);
gint i;
if (GST_BUFFER_IS_DISCONT (buf)) {
avi->stream[i].discont = TRUE;
}
- GST_DEBUG ("Store %d bytes in adapter", GST_BUFFER_SIZE (buf));
+ GST_DEBUG ("Store %" G_GSIZE_FORMAT " bytes in adapter",
+ gst_buffer_get_size (buf));
gst_adapter_push (avi->adapter, buf);
switch (avi->state) {
}
break;
case GST_AVI_DEMUX_MOVI:
- if (G_UNLIKELY (avi->close_seg_event)) {
- gst_avi_demux_push_event (avi, avi->close_seg_event);
- avi->close_seg_event = NULL;
- }
if (G_UNLIKELY (avi->seg_event)) {
gst_avi_demux_push_event (avi, avi->seg_event);
avi->seg_event = NULL;
}
static gboolean
-gst_avi_demux_sink_activate (GstPad * sinkpad)
+gst_avi_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
{
- if (gst_pad_check_pull_range (sinkpad)) {
- GST_DEBUG ("going to pull mode");
- return gst_pad_activate_pull (sinkpad, TRUE);
- } else {
- GST_DEBUG ("going to push (streaming) mode");
- return gst_pad_activate_push (sinkpad, TRUE);
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
}
-}
-static gboolean
-gst_avi_demux_sink_activate_pull (GstPad * sinkpad, gboolean active)
-{
- GstAviDemux *avi = GST_AVI_DEMUX (GST_OBJECT_PARENT (sinkpad));
+ pull_mode = gst_query_has_scheduling_mode (query, GST_PAD_MODE_PULL);
+ gst_query_unref (query);
- if (active) {
- avi->segment_running = TRUE;
- avi->streaming = FALSE;
- return gst_pad_start_task (sinkpad, (GstTaskFunction) gst_avi_demux_loop,
- sinkpad);
- } else {
- avi->segment_running = FALSE;
- return gst_pad_stop_task (sinkpad);
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
}
}
static gboolean
-gst_avi_demux_activate_push (GstPad * pad, gboolean active)
+gst_avi_demux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
{
- GstAviDemux *avi = GST_AVI_DEMUX (GST_OBJECT_PARENT (pad));
-
- if (active) {
- GST_DEBUG ("avi: activating push/chain function");
- avi->streaming = TRUE;
-#if 0
- /* create index for some push based seeking if not provided */
- GST_OBJECT_LOCK (avi);
- if (!avi->element_index) {
- GST_DEBUG_OBJECT (avi, "creating index");
- avi->element_index = gst_index_factory_make ("memindex");
- }
- GST_OBJECT_UNLOCK (avi);
- /* object lock might be taken again */
- gst_index_get_writer_id (avi->element_index, GST_OBJECT_CAST (avi),
- &avi->index_id);
-#endif
- } else {
- GST_DEBUG ("avi: deactivating push/chain function");
+ gboolean res;
+ GstAviDemux *avi = GST_AVI_DEMUX (parent);
+
+ switch (mode) {
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ avi->streaming = FALSE;
+ res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_avi_demux_loop,
+ sinkpad);
+ } else {
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ case GST_PAD_MODE_PUSH:
+ if (active) {
+ GST_DEBUG ("avi: activating push/chain function");
+ avi->streaming = TRUE;
+ } else {
+ GST_DEBUG ("avi: deactivating push/chain function");
+ }
+ res = TRUE;
+ break;
+ default:
+ res = FALSE;
+ break;
}
-
- return TRUE;
+ return res;
}
+#if 0
static void
gst_avi_demux_set_index (GstElement * element, GstIndex * index)
{
return result;
}
+#endif
static GstStateChangeReturn
gst_avi_demux_change_state (GstElement * element, GstStateChange transition)
/* segment in TIME */
GstSegment segment;
- gboolean segment_running;
/* pending tags/events */
GstEvent *seg_event;
- GstEvent *close_seg_event;
GstTagList *globaltags;
gboolean got_tags;
+#if 0
/* gst index support */
GstIndex *element_index;
gint index_id;
+#endif
+
gboolean seekable;
guint64 first_movi_offset;
* <para>(write everything in one line, without the backslash characters)</para>
* |[
* gst-launch videotestsrc num-buffers=250 \
- * ! 'video/x-raw-yuv,format=(fourcc)I420,width=320,height=240,framerate=(fraction)25/1' \
+ * ! 'video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1' \
* ! queue ! mux. \
* audiotestsrc num-buffers=440 ! audioconvert \
- * ! 'audio/x-raw-int,rate=44100,channels=2' ! queue ! mux. \
+ * ! 'audio/x-raw,rate=44100,channels=2' ! queue ! mux. \
* avimux name=mux ! filesink location=test.avi
* ]| This will create an .AVI file containing an uncompressed video stream
* with a test picture and an uncompressed audio stream containing a
* test sound.
* |[
* gst-launch videotestsrc num-buffers=250 \
- * ! 'video/x-raw-yuv,format=(fourcc)I420,width=320,height=240,framerate=(fraction)25/1' \
+ * ! 'video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1' \
* ! xvidenc ! queue ! mux. \
- * audiotestsrc num-buffers=440 ! audioconvert ! 'audio/x-raw-int,rate=44100,channels=2' \
+ * audiotestsrc num-buffers=440 ! audioconvert ! 'audio/x-raw,rate=44100,channels=2' \
* ! lame ! queue ! mux. \
* avimux name=mux ! filesink location=test.avi
* ]| This will create an .AVI file containing the same test video and sound
#include <string.h>
#include <gst/video/video.h>
+#include <gst/audio/audio.h>
#include <gst/base/gstbytewriter.h>
#include "gstavimux.h"
);
static GstStaticPadTemplate video_sink_factory =
- GST_STATIC_PAD_TEMPLATE ("video_%d",
+ GST_STATIC_PAD_TEMPLATE ("video_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
- GST_STATIC_CAPS ("video/x-raw-yuv, "
- "format = (fourcc) { YUY2, I420 }, "
+ GST_STATIC_CAPS ("video/x-raw, "
+ "format = (string) { YUY2, I420 }, "
"width = (int) [ 16, 4096 ], "
"height = (int) [ 16, 4096 ], "
"framerate = (fraction) [ 0, MAX ]; "
);
static GstStaticPadTemplate audio_sink_factory =
- GST_STATIC_PAD_TEMPLATE ("audio_%d",
+ GST_STATIC_PAD_TEMPLATE ("audio_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) LITTLE_ENDIAN, "
- "signed = (boolean) { TRUE, FALSE }, "
- "width = (int) { 8, 16 }, "
- "depth = (int) { 8, 16 }, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { U8, S16LE }, "
"rate = (int) [ 1000, 96000 ], "
"channels = (int) [ 1, 2 ]; "
"audio/mpeg, "
"wmaversion = (int) [ 1, 2 ] ")
);
-static void gst_avi_mux_base_init (gpointer g_class);
-static void gst_avi_mux_class_init (GstAviMuxClass * klass);
-static void gst_avi_mux_init (GstAviMux * avimux);
static void gst_avi_mux_pad_reset (GstAviPad * avipad, gboolean free);
static GstFlowReturn gst_avi_mux_collect_pads (GstCollectPads2 * pads,
static gboolean gst_avi_mux_handle_event (GstCollectPads2 * pad,
GstCollectData2 * data, GstEvent * event, gpointer user_data);
static GstPad *gst_avi_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_avi_mux_release_pad (GstElement * element, GstPad * pad);
static void gst_avi_mux_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_avi_mux_change_state (GstElement * element,
GstStateChange transition);
-static GstElementClass *parent_class = NULL;
-
-GType
-gst_avi_mux_get_type (void)
-{
- static GType avimux_type = 0;
-
- if (!avimux_type) {
- static const GTypeInfo avimux_info = {
- sizeof (GstAviMuxClass),
- gst_avi_mux_base_init,
- NULL,
- (GClassInitFunc) gst_avi_mux_class_init,
- NULL,
- NULL,
- sizeof (GstAviMux),
- 0,
- (GInstanceInitFunc) gst_avi_mux_init,
- };
- static const GInterfaceInfo tag_setter_info = {
- NULL,
- NULL,
- NULL
- };
-
- avimux_type =
- g_type_register_static (GST_TYPE_ELEMENT, "GstAviMux", &avimux_info, 0);
- g_type_add_interface_static (avimux_type, GST_TYPE_TAG_SETTER,
- &tag_setter_info);
- }
- return avimux_type;
-}
-
-static void
-gst_avi_mux_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &audio_sink_factory);
- gst_element_class_add_static_pad_template (element_class,
- &video_sink_factory);
-
- gst_element_class_set_details_simple (element_class, "Avi muxer",
- "Codec/Muxer",
- "Muxes audio and video into an avi stream",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-
- GST_DEBUG_CATEGORY_INIT (avimux_debug, "avimux", 0, "Muxer for AVI streams");
-}
+#define gst_avi_mux_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstAviMux, gst_avi_mux, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
static void
gst_avi_mux_finalize (GObject * object)
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
+ GST_DEBUG_CATEGORY_INIT (avimux_debug, "avimux", 0, "Muxer for AVI streams");
gobject_class->get_property = gst_avi_mux_get_property;
gobject_class->set_property = gst_avi_mux_set_property;
GST_DEBUG_FUNCPTR (gst_avi_mux_request_new_pad);
gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_avi_mux_release_pad);
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_avi_mux_change_state);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&audio_sink_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&video_sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "Avi muxer",
+ "Codec/Muxer",
+ "Muxes audio and video into an avi stream",
+ "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
}
/* reset pad to initial state
avipad->vprp.field_info[0].valid_bm_width = width;
}
- if (!strcmp (mimetype, "video/x-raw-yuv")) {
- guint32 format;
+ if (!strcmp (mimetype, "video/x-raw")) {
+ const gchar *format;
+ GstVideoFormat fmt;
+
+ format = gst_structure_get_string (structure, "format");
+ fmt = gst_video_format_from_string (format);
- gst_structure_get_fourcc (structure, "format", &format);
- avipad->vids.compression = format;
- switch (format) {
- case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
+ switch (fmt) {
+ case GST_VIDEO_FORMAT_YUY2:
+ avipad->vids.compression = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
avipad->vids.bit_cnt = 16;
break;
- case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+ case GST_VIDEO_FORMAT_I420:
+ avipad->vids.compression = GST_MAKE_FOURCC ('I', '4', '2', '0');
avipad->vids.bit_cnt = 12;
break;
+ default:
+ break;
}
} else {
avipad->vids.bit_cnt = 24;
avipad->vids_codec_data = gst_value_get_buffer (codec_data);
gst_buffer_ref (avipad->vids_codec_data);
/* keep global track of size */
- avimux->codec_data_size += GST_BUFFER_SIZE (avipad->vids_codec_data);
+ avimux->codec_data_size += gst_buffer_get_size (avipad->vids_codec_data);
} else {
avipad->prepend_buffer =
gst_buffer_ref (gst_value_get_buffer (codec_data));
gst_avi_mux_audsink_scan_mpeg_audio (GstAviMux * avimux, GstAviPad * avipad,
GstBuffer * buffer)
{
- guint8 *data;
- guint size;
+ GstMapInfo map;
guint spf;
guint32 header;
gulong layer;
gulong version;
gint lsf, mpg25;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
-
- if (size < 4)
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ if (map.size < 4)
goto not_parsed;
- header = GST_READ_UINT32_BE (data);
+ header = GST_READ_UINT32_BE (map.data);
if ((header & 0xffe00000) != 0xffe00000)
goto not_parsed;
GST_WARNING_OBJECT (avimux, "input mpeg audio has varying frame size");
goto cbr_fallback;
}
+done:
+ gst_buffer_unmap (buffer, &map);
return GST_FLOW_OK;
avipad->hdr.scale = 1;
/* no need to check further */
avipad->hook = NULL;
- return GST_FLOW_OK;
+ goto done;
}
}
avipad->auds_codec_data = gst_value_get_buffer (codec_data);
gst_buffer_ref (avipad->auds_codec_data);
/* keep global track of size */
- avimux->codec_data_size += GST_BUFFER_SIZE (avipad->auds_codec_data);
+ avimux->codec_data_size += gst_buffer_get_size (avipad->auds_codec_data);
}
- if (!strcmp (mimetype, "audio/x-raw-int")) {
- gint width, depth;
- gboolean signedness;
-
- avipad->auds.format = GST_RIFF_WAVE_FORMAT_PCM;
-
- if (!gst_structure_get_int (structure, "width", &width) ||
- !gst_structure_get_int (structure, "depth", &depth) ||
- !gst_structure_get_boolean (structure, "signed", &signedness)) {
- GST_DEBUG_OBJECT (avimux,
- "broken caps, width/depth/signed field missing");
- goto refuse_caps;
- }
+ if (!strcmp (mimetype, "audio/x-raw")) {
+ const gchar *format;
+ GstAudioFormat fmt;
- /* no clear place to put different values for these while keeping to spec */
- if (width != depth) {
- GST_DEBUG_OBJECT (avimux, "width must be same as depth!");
- goto refuse_caps;
- }
+ format = gst_structure_get_string (structure, "format");
+ fmt = gst_audio_format_from_string (format);
- /* because that's the way the caps will be recreated from riff data */
- if ((width == 8 && signedness) || (width == 16 && !signedness)) {
- GST_DEBUG_OBJECT (avimux,
- "8-bit PCM must be unsigned, 16-bit PCM signed");
- goto refuse_caps;
+ switch (fmt) {
+ case GST_AUDIO_FORMAT_U8:
+ avipad->auds.blockalign = 8;
+ avipad->auds.size = 8;
+ break;
+ case GST_AUDIO_FORMAT_S16:
+ avipad->auds.blockalign = 16;
+ avipad->auds.size = 16;
+ break;
+ default:
+ goto refuse_caps;
}
- avipad->auds.blockalign = width;
- avipad->auds.size = (width == 8) ? 8 : depth;
-
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_PCM;
/* set some more info straight */
avipad->auds.blockalign /= 8;
avipad->auds.blockalign *= avipad->auds.channels;
GstBuffer *codec_data_buf = avipad->auds_codec_data;
const gchar *stream_format;
guint codec;
+ guint8 data[2];
stream_format = gst_structure_get_string (structure, "stream-format");
if (stream_format) {
}
/* vbr case needs some special handling */
- if (!codec_data_buf || GST_BUFFER_SIZE (codec_data_buf) < 2) {
+ if (!codec_data_buf || gst_buffer_get_size (codec_data_buf) < 2) {
GST_WARNING_OBJECT (avimux, "no (valid) codec_data for AAC audio");
break;
}
avipad->auds.format = GST_RIFF_WAVE_FORMAT_AAC;
/* need to determine frame length */
- codec = GST_READ_UINT16_BE (GST_BUFFER_DATA (codec_data_buf));
+ gst_buffer_extract (codec_data_buf, 0, data, 2);
+ codec = GST_READ_UINT16_BE (data);
avipad->parent.hdr.scale = (codec & 0x4) ? 960 : 1024;
break;
}
static GstPad *
gst_avi_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * req_name)
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
GstAviMux *avimux;
GstPad *newpad;
GstElementClass *klass;
gchar *name = NULL;
const gchar *pad_name = NULL;
- GstPadSetCapsFunction setcapsfunc = NULL;
gint pad_id;
g_return_val_if_fail (templ != NULL, NULL);
klass = GST_ELEMENT_GET_CLASS (element);
- /* FIXME-0.11: use %d instead of %02d for pad_names */
-
- if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) {
+ if (templ == gst_element_class_get_pad_template (klass, "audio_%u")) {
/* don't mix named and unnamed pads, if the pad already exists we fail when
* trying to add it */
- if (req_name != NULL && sscanf (req_name, "audio_%02d", &pad_id) == 1) {
+ if (req_name != NULL && sscanf (req_name, "audio_%u", &pad_id) == 1) {
pad_name = req_name;
} else {
- name = g_strdup_printf ("audio_%02d", avimux->audio_pads++);
+ name = g_strdup_printf ("audio_%u", avimux->audio_pads++);
pad_name = name;
}
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_avi_mux_audsink_set_caps);
/* init pad specific data */
avipad = g_malloc0 (sizeof (GstAviAudioPad));
avipad->hdr.type = GST_MAKE_FOURCC ('a', 'u', 'd', 's');
/* audio goes last */
avimux->sinkpads = g_slist_append (avimux->sinkpads, avipad);
- } else if (templ == gst_element_class_get_pad_template (klass, "video_%d")) {
+ } else if (templ == gst_element_class_get_pad_template (klass, "video_%u")) {
/* though streams are pretty generic and relatively self-contained,
* some video info goes in a single avi header -and therefore mux struct-
* so video restricted to one stream */
goto too_many_video_pads;
/* setup pad */
- pad_name = "video_00";
+ pad_name = "video_0";
avimux->video_pads++;
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_avi_mux_vidsink_set_caps);
/* init pad specific data */
avipad = g_malloc0 (sizeof (GstAviVideoPad));
goto wrong_template;
newpad = gst_pad_new_from_template (templ, pad_name);
- gst_pad_set_setcaps_function (newpad, setcapsfunc);
g_free (name);
avipad->collect = gst_collect_pads2_add_pad (avimux->collect,
newpad, sizeof (GstAviCollectData));
((GstAviCollectData *) (avipad->collect))->avipad = avipad;
+
if (!gst_element_add_pad (element, newpad))
goto pad_add_failed;
GstByteWriter bw;
GSList *node;
guint avih, riff, hdrl;
+ GstMapInfo map;
GST_DEBUG_OBJECT (avimux, "creating avi header, data_size %u, idx_size %u",
avimux->data_size, avimux->idx_size);
if (avipad->is_video) {
codec_size = vidpad->vids_codec_data ?
- GST_BUFFER_SIZE (vidpad->vids_codec_data) : 0;
+ gst_buffer_get_size (vidpad->vids_codec_data) : 0;
/* the video header */
strf = gst_avi_mux_start_chunk (&bw, "strf", 0);
/* the actual header */
gst_byte_writer_put_uint32_le (&bw, vidpad->vids.num_colors);
gst_byte_writer_put_uint32_le (&bw, vidpad->vids.imp_colors);
if (vidpad->vids_codec_data) {
- gst_byte_writer_put_data (&bw,
- GST_BUFFER_DATA (vidpad->vids_codec_data),
- GST_BUFFER_SIZE (vidpad->vids_codec_data));
+ gst_buffer_map (vidpad->vids_codec_data, &map, GST_MAP_READ);
+ gst_byte_writer_put_data (&bw, map.data, map.size);
+ gst_buffer_unmap (vidpad->vids_codec_data, &map);
}
gst_avi_mux_end_chunk (&bw, strf);
}
} else {
codec_size = audpad->auds_codec_data ?
- GST_BUFFER_SIZE (audpad->auds_codec_data) : 0;
+ gst_buffer_get_size (audpad->auds_codec_data) : 0;
/* the audio header */
strf = gst_avi_mux_start_chunk (&bw, "strf", 0);
/* the actual header */
gst_byte_writer_put_uint16_le (&bw, audpad->auds.size);
gst_byte_writer_put_uint16_le (&bw, codec_size);
if (audpad->auds_codec_data) {
- gst_byte_writer_put_data (&bw,
- GST_BUFFER_DATA (audpad->auds_codec_data),
- GST_BUFFER_SIZE (audpad->auds_codec_data));
+ gst_buffer_map (audpad->auds_codec_data, &map, GST_MAP_READ);
+ gst_byte_writer_put_data (&bw, map.data, map.size);
+ gst_buffer_unmap (vidpad->vids_codec_data, &map);
}
gst_avi_mux_end_chunk (&bw, strf);
}
buffer = gst_byte_writer_reset_and_get_buffer (&bw);
/* ... but RIFF includes more than just header */
- size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buffer) + 4);
+ gst_buffer_map (buffer, &map, GST_MAP_READWRITE);
+ size = GST_READ_UINT32_LE (map.data + 4);
size += 8 + avimux->data_size + avimux->idx_size;
- GST_WRITE_UINT32_LE (GST_BUFFER_DATA (buffer) + 4, size);
+ GST_WRITE_UINT32_LE (map.data + 4, size);
- GST_MEMDUMP_OBJECT (avimux, "avi header", GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ GST_MEMDUMP_OBJECT (avimux, "avi header", map.data, map.size);
+ gst_buffer_unmap (buffer, &map);
return buffer;
}
gst_avi_mux_riff_get_avix_header (guint32 datax_size)
{
GstBuffer *buffer;
- guint8 *buffdata;
+ GstMapInfo map;
buffer = gst_buffer_new_and_alloc (24);
- buffdata = GST_BUFFER_DATA (buffer);
- memcpy (buffdata + 0, "RIFF", 4);
- GST_WRITE_UINT32_LE (buffdata + 4, datax_size + 3 * 4);
- memcpy (buffdata + 8, "AVIX", 4);
- memcpy (buffdata + 12, "LIST", 4);
- GST_WRITE_UINT32_LE (buffdata + 16, datax_size);
- memcpy (buffdata + 20, "movi", 4);
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ memcpy (map.data + 0, "RIFF", 4);
+ GST_WRITE_UINT32_LE (map.data + 4, datax_size + 3 * 4);
+ memcpy (map.data + 8, "AVIX", 4);
+ memcpy (map.data + 12, "LIST", 4);
+ GST_WRITE_UINT32_LE (map.data + 16, datax_size);
+ memcpy (map.data + 20, "movi", 4);
+ gst_buffer_unmap (buffer, &map);
return buffer;
}
gst_avi_mux_riff_get_header (GstAviPad * avipad, guint32 video_frame_size)
{
GstBuffer *buffer;
- guint8 *buffdata;
+ GstMapInfo map;
buffer = gst_buffer_new_and_alloc (8);
- buffdata = GST_BUFFER_DATA (buffer);
- memcpy (buffdata + 0, avipad->tag, 4);
- GST_WRITE_UINT32_LE (buffdata + 4, video_frame_size);
+
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ memcpy (map.data + 0, avipad->tag, 4);
+ GST_WRITE_UINT32_LE (map.data + 4, video_frame_size);
+ gst_buffer_unmap (buffer, &map);
return buffer;
}
{
GstFlowReturn res;
GstBuffer *buffer;
- guint8 *buffdata, *data;
+ guint8 *data;
gst_riff_index_entry *entry;
gint i;
guint32 size, entry_count;
gboolean is_pcm = FALSE;
guint32 pcm_samples = 0;
+ GstMapInfo map;
/* check if it is pcm */
if (avipad && !avipad->is_video) {
/* allocate the maximum possible */
buffer = gst_buffer_new_and_alloc (32 + 8 * avimux->idx_index);
- buffdata = GST_BUFFER_DATA (buffer);
+
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ data = map.data;
/* general index chunk info */
- memcpy (buffdata + 0, chunk, 4); /* chunk id */
- GST_WRITE_UINT32_LE (buffdata + 4, 0); /* chunk size; fill later */
- GST_WRITE_UINT16_LE (buffdata + 8, 2); /* index entry is 2 words */
- buffdata[10] = 0; /* index subtype */
- buffdata[11] = GST_AVI_INDEX_OF_CHUNKS; /* index type: AVI_INDEX_OF_CHUNKS */
- GST_WRITE_UINT32_LE (buffdata + 12, 0); /* entries in use; fill later */
- memcpy (buffdata + 16, code, 4); /* stream to which index refers */
- GST_WRITE_UINT64_LE (buffdata + 20, avimux->avix_start); /* base offset */
- GST_WRITE_UINT32_LE (buffdata + 28, 0); /* reserved */
- buffdata += 32;
+ memcpy (map.data + 0, chunk, 4); /* chunk id */
+ GST_WRITE_UINT32_LE (map.data + 4, 0); /* chunk size; fill later */
+ GST_WRITE_UINT16_LE (map.data + 8, 2); /* index entry is 2 words */
+ map.data[10] = 0; /* index subtype */
+ map.data[11] = GST_AVI_INDEX_OF_CHUNKS; /* index type: AVI_INDEX_OF_CHUNKS */
+ GST_WRITE_UINT32_LE (map.data + 12, 0); /* entries in use; fill later */
+ memcpy (map.data + 16, code, 4); /* stream to which index refers */
+ GST_WRITE_UINT64_LE (map.data + 20, avimux->avix_start); /* base offset */
+ GST_WRITE_UINT32_LE (map.data + 28, 0); /* reserved */
+ map.data += 32;
/* now the actual index entries */
i = avimux->idx_index;
while (i > 0) {
if (memcmp (&entry->id, code, 4) == 0) {
/* enter relative offset to the data (!) */
- GST_WRITE_UINT32_LE (buffdata, GUINT32_FROM_LE (entry->offset) + 8);
+ GST_WRITE_UINT32_LE (map.data, GUINT32_FROM_LE (entry->offset) + 8);
/* msb is set if not (!) keyframe */
- GST_WRITE_UINT32_LE (buffdata + 4, GUINT32_FROM_LE (entry->size)
+ GST_WRITE_UINT32_LE (map.data + 4, GUINT32_FROM_LE (entry->size)
| (GUINT32_FROM_LE (entry->flags)
& GST_RIFF_IF_KEYFRAME ? 0 : 1U << 31));
- buffdata += 8;
+ map.data += 8;
}
i--;
entry++;
}
/* ok, now we know the size and no of entries, fill in where needed */
- data = GST_BUFFER_DATA (buffer);
- GST_BUFFER_SIZE (buffer) = size = buffdata - data;
+ size = map.data - data;
GST_WRITE_UINT32_LE (data + 4, size - 8);
entry_count = (size - 32) / 8;
GST_WRITE_UINT32_LE (data + 12, entry_count);
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_resize (buffer, 0, size);
- /* decorate and send */
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
+ /* send */
if ((res = gst_pad_push (avimux->srcpad, buffer)) != GST_FLOW_OK)
return res;
{
GstFlowReturn res;
GstBuffer *buffer;
- guint8 *buffdata;
+ GstMapInfo map;
+ guint8 *data;
+ gsize size;
buffer = gst_buffer_new_and_alloc (8);
- buffdata = GST_BUFFER_DATA (buffer);
- memcpy (buffdata + 0, "idx1", 4);
- GST_WRITE_UINT32_LE (buffdata + 4,
+
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ memcpy (map.data + 0, "idx1", 4);
+ GST_WRITE_UINT32_LE (map.data + 4,
avimux->idx_index * sizeof (gst_riff_index_entry));
+ gst_buffer_unmap (buffer, &map);
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
res = gst_pad_push (avimux->srcpad, buffer);
if (res != GST_FLOW_OK)
return res;
buffer = gst_buffer_new ();
- GST_BUFFER_SIZE (buffer) = avimux->idx_index * sizeof (gst_riff_index_entry);
- GST_BUFFER_DATA (buffer) = (guint8 *) avimux->idx;
- GST_BUFFER_MALLOCDATA (buffer) = GST_BUFFER_DATA (buffer);
+
+ size = avimux->idx_index * sizeof (gst_riff_index_entry);
+ data = (guint8 *) avimux->idx;
avimux->idx = NULL; /* will be free()'ed by gst_buffer_unref() */
- avimux->total_data += GST_BUFFER_SIZE (buffer) + 8;
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
+ gst_buffer_take_memory (buffer, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
+
+ avimux->total_data += size + 8;
+
res = gst_pad_push (avimux->srcpad, buffer);
if (res != GST_FLOW_OK)
return res;
{
GstFlowReturn res = GST_FLOW_OK;
GstBuffer *header;
- GstEvent *event;
GSList *node;
/* first some odml standard index chunks in the movi list */
}
if (avimux->is_bigfile) {
+ GstSegment segment;
+
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+
/* search back */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- avimux->avix_start, GST_CLOCK_TIME_NONE, avimux->avix_start);
- /* if the event succeeds */
- gst_pad_push_event (avimux->srcpad, event);
+ segment.start = avimux->avix_start;
+ segment.time = avimux->avix_start;
+ gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));
/* rewrite AVIX header */
header = gst_avi_mux_riff_get_avix_header (avimux->datax_size);
- gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
res = gst_pad_push (avimux->srcpad, header);
/* go back to current location, at least try */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- avimux->total_data, GST_CLOCK_TIME_NONE, avimux->total_data);
- gst_pad_push_event (avimux->srcpad, event);
+ segment.start = avimux->total_data;
+ segment.time = avimux->total_data;
+ gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));
if (res != GST_FLOW_OK)
return res;
}
header = gst_avi_mux_riff_get_avix_header (0);
- avimux->total_data += GST_BUFFER_SIZE (header);
+ avimux->total_data += gst_buffer_get_size (header);
/* avix_start is used as base offset for the odml index chunk */
avimux->idx_offset = avimux->total_data - avimux->avix_start;
- gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
+
return gst_pad_push (avimux->srcpad, header);
}
GstBuffer *header;
GSList *node;
GstCaps *caps;
+ GstSegment segment;
avimux->total_data = 0;
avimux->total_frames = 0;
gst_caps_unref (caps);
/* let downstream know we think in BYTES and expect to do seeking later on */
- gst_pad_push_event (avimux->srcpad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));
/* header */
avimux->avi_hdr.streams = g_slist_length (avimux->sinkpads);
avimux->is_bigfile = FALSE;
header = gst_avi_mux_riff_get_avi_header (avimux);
- avimux->total_data += GST_BUFFER_SIZE (header);
+ avimux->total_data += gst_buffer_get_size (header);
- gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
res = gst_pad_push (avimux->srcpad, header);
avimux->idx_offset = avimux->total_data;
gst_avi_mux_stop_file (GstAviMux * avimux)
{
GstFlowReturn res = GST_FLOW_OK;
- GstEvent *event;
GstBuffer *header;
GSList *node;
+ GstSegment segment;
/* if bigfile, rewrite header, else write indexes */
/* don't bail out at once if error, still try to re-write header */
avimux->avi_hdr.tot_frames = avimux->num_frames;
/* seek and rewrite the header */
- header = gst_avi_mux_riff_get_avi_header (avimux);
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- 0, GST_CLOCK_TIME_NONE, 0);
- gst_pad_push_event (avimux->srcpad, event);
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));
- gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
/* the first error survives */
+ header = gst_avi_mux_riff_get_avi_header (avimux);
if (res == GST_FLOW_OK)
res = gst_pad_push (avimux->srcpad, header);
else
gst_pad_push (avimux->srcpad, header);
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- avimux->total_data, GST_CLOCK_TIME_NONE, avimux->total_data);
- gst_pad_push_event (avimux->srcpad, event);
+ segment.start = avimux->total_data;
+ segment.time = avimux->total_data;
+ gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));
avimux->write_header = TRUE;
GstEvent * event, gpointer user_data)
{
GstAviMux *avimux;
+ gboolean ret = FALSE;
avimux = GST_AVI_MUX (user_data);
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+ GstAviCollectData *collect_pad;
+ GstAviVideoPad *avipad;
+
+ gst_event_parse_caps (event, &caps);
+
+ /* find stream data */
+ collect_pad = (GstAviCollectData *) data;
+ g_assert (collect_pad);
+ avipad = (GstAviVideoPad *) collect_pad->avipad;
+ g_assert (avipad);
+
+ if (avipad->parent.is_video) {
+ ret = gst_avi_mux_vidsink_set_caps (data->pad, caps);
+ } else {
+ ret = gst_avi_mux_audsink_set_caps (data->pad, caps);
+ }
+ gst_event_unref (event);
+ break;
+ }
case GST_EVENT_TAG:{
GstTagList *list;
GstTagSetter *setter = GST_TAG_SETTER (avimux);
gst_event_parse_tag (event, &list);
gst_tag_setter_merge_tags (setter, list, mode);
+ gst_event_unref (event);
+ ret = TRUE;
break;
}
+ case GST_EVENT_EOS:
+ case GST_EVENT_SEGMENT:
+ gst_event_unref (event);
+ ret = TRUE;
+ break;
default:
+ ret = gst_pad_event_default (data->pad, GST_OBJECT (avimux), event);
break;
}
- /* now GstCollectPads2 can take care of the rest, e.g. EOS */
- return FALSE;
+ return ret;
}
/* send extra 'padding' data */
GstBuffer *buffer;
buffer = gst_buffer_new_and_alloc (num_bytes);
- memset (GST_BUFFER_DATA (buffer), 0, num_bytes);
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
+ gst_buffer_memset (buffer, 0, 0, num_bytes);
+
return gst_pad_push (avimux->srcpad, buffer);
}
GstBuffer *data, *header;
gulong total_size, pad_bytes = 0;
guint flags;
+ gsize datasize;
data = gst_collect_pads2_pop (avimux->collect, avipad->collect);
/* arrange downstream running time */
- data = gst_buffer_make_metadata_writable (data);
+ data = gst_buffer_make_writable (data);
GST_BUFFER_TIMESTAMP (data) =
gst_segment_to_running_time (&avipad->collect->segment,
GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (data));
if (vidpad->prepend_buffer) {
GstBuffer *newdata = gst_buffer_merge (vidpad->prepend_buffer, data);
- gst_buffer_copy_metadata (newdata, data, GST_BUFFER_COPY_TIMESTAMPS);
+ gst_buffer_copy_into (newdata, data, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
gst_buffer_unref (data);
gst_buffer_unref (vidpad->prepend_buffer);
return res;
}
+ datasize = gst_buffer_get_size (data);
+
/* need to restart or start a next avix chunk ? */
if ((avimux->is_bigfile ? avimux->datax_size : avimux->data_size) +
- GST_BUFFER_SIZE (data) > GST_AVI_MAX_SIZE) {
+ datasize > GST_AVI_MAX_SIZE) {
if (avimux->enable_large_avi) {
if ((res = gst_avi_mux_bigfile (avimux, FALSE)) != GST_FLOW_OK)
return res;
}
/* get header and record some stats */
- if (GST_BUFFER_SIZE (data) & 1) {
- pad_bytes = 2 - (GST_BUFFER_SIZE (data) & 1);
+ if (datasize & 1) {
+ pad_bytes = 2 - (datasize & 1);
}
- header = gst_avi_mux_riff_get_header (avipad, GST_BUFFER_SIZE (data));
- total_size = GST_BUFFER_SIZE (header) + GST_BUFFER_SIZE (data) + pad_bytes;
+ header = gst_avi_mux_riff_get_header (avipad, datasize);
+ total_size = gst_buffer_get_size (header) + datasize + pad_bytes;
if (avimux->is_bigfile) {
avimux->datax_size += total_size;
avipad->hook (avimux, avipad, data);
/* the suggested buffer size is the max frame size */
- if (avipad->hdr.bufsize < GST_BUFFER_SIZE (data))
- avipad->hdr.bufsize = GST_BUFFER_SIZE (data);
+ if (avipad->hdr.bufsize < datasize)
+ avipad->hdr.bufsize = datasize;
if (avipad->is_video) {
avimux->total_frames++;
GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;
flags = 0;
- audpad->audio_size += GST_BUFFER_SIZE (data);
+ audpad->audio_size += datasize;
audpad->audio_time += GST_BUFFER_DURATION (data);
}
- gst_avi_mux_add_index (avimux, avipad, flags, GST_BUFFER_SIZE (data));
-
- /* prepare buffers for sending */
- gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
- data = gst_buffer_make_metadata_writable (data);
- gst_buffer_set_caps (data, GST_PAD_CAPS (avimux->srcpad));
+ gst_avi_mux_add_index (avimux, avipad, flags, datasize);
+ /* send buffers */
GST_LOG_OBJECT (avimux, "pushing buffers: head, data");
if ((res = gst_pad_push (avimux->srcpad, header)) != GST_FLOW_OK)
/* simply finish off the file and send EOS */
gst_avi_mux_stop_file (avimux);
gst_pad_push_event (avimux->srcpad, gst_event_new_eos ());
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
/* ERRORS */
);
static void gst_avi_subtitle_title_tag (GstAviSubtitle * sub, gchar * title);
-static GstFlowReturn gst_avi_subtitle_chain (GstPad * pad, GstBuffer * buffer);
+static GstFlowReturn gst_avi_subtitle_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
static GstStateChangeReturn gst_avi_subtitle_change_state (GstElement * element,
GstStateChange transition);
static gboolean gst_avi_subtitle_send_event (GstElement * element,
GstEvent * event);
-GST_BOILERPLATE (GstAviSubtitle, gst_avi_subtitle, GstElement,
- GST_TYPE_ELEMENT);
+#define gst_avi_subtitle_parent_class parent_class
+G_DEFINE_TYPE (GstAviSubtitle, gst_avi_subtitle, GST_TYPE_ELEMENT);
#define IS_BOM_UTF8(data) ((GST_READ_UINT32_BE(data) >> 8) == 0xEFBBBF)
#define IS_BOM_UTF16_BE(data) (GST_READ_UINT16_BE(data) == 0xFEFF)
const gchar *input_enc = NULL;
GstBuffer *ret = NULL;
gchar *data;
+ GstMapInfo map;
- data = (gchar *) GST_BUFFER_DATA (buffer) + offset;
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = (gchar *) (map.data + offset);
if (len >= (3 + 1) && IS_BOM_UTF8 (data) &&
g_utf8_validate (data + 3, len - 3, NULL)) {
- ret = gst_buffer_create_sub (buffer, offset + 3, len - 3);
+ ret =
+ gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset + 3,
+ len - 3);
} else if (len >= 2 && IS_BOM_UTF16_BE (data)) {
input_enc = "UTF-16BE";
data += 2;
len -= 4;
} else if (g_utf8_validate (data, len, NULL)) {
/* not specified, check if it's UTF-8 */
- ret = gst_buffer_create_sub (buffer, offset, len);
+ ret = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset, len);
} else {
/* we could fall back to gst_tag_freeform_to_utf8() here */
GST_WARNING_OBJECT (sub, "unspecified encoding, and not UTF-8");
- return NULL;
+ ret = NULL;
+ goto done;
}
g_return_val_if_fail (ret != NULL || input_enc != NULL, NULL);
if (input_enc) {
GError *err = NULL;
gchar *utf8;
+ gsize slen;
GST_DEBUG_OBJECT (sub, "converting subtitles from %s to UTF-8", input_enc);
utf8 = g_convert (data, len, "UTF-8", input_enc, NULL, NULL, &err);
if (err != NULL) {
GST_WARNING_OBJECT (sub, "conversion to UTF-8 failed : %s", err->message);
g_error_free (err);
- return NULL;
+ ret = NULL;
+ goto done;
}
ret = gst_buffer_new ();
- GST_BUFFER_DATA (ret) = (guint8 *) utf8;
- GST_BUFFER_MALLOCDATA (ret) = (guint8 *) utf8;
- GST_BUFFER_SIZE (ret) = strlen (utf8);
+ slen = strlen (utf8);
+ gst_buffer_take_memory (ret, -1,
+ gst_memory_new_wrapped (0, utf8, g_free, slen, 0, slen));
+
GST_BUFFER_OFFSET (ret) = 0;
}
- GST_BUFFER_CAPS (ret) = gst_caps_new_simple ("application/x-subtitle", NULL);
+done:
+ gst_buffer_unmap (buffer, &map);
+
return ret;
}
static void
gst_avi_subtitle_title_tag (GstAviSubtitle * sub, gchar * title)
{
- GstTagList *temp_list = gst_tag_list_new ();
-
- gst_tag_list_add (temp_list, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, title,
- NULL);
- gst_pad_push_event (sub->src, gst_event_new_tag (temp_list));
+ gst_pad_push_event (sub->src,
+ gst_event_new_tag (gst_tag_list_new (GST_TAG_TITLE, title, NULL)));
}
static GstFlowReturn
gst_avi_subtitle_parse_gab2_chunk (GstAviSubtitle * sub, GstBuffer * buf)
{
- const guint8 *data;
gchar *name_utf8;
guint name_length;
guint file_length;
- guint size;
+ GstMapInfo map;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
/* check the magic word "GAB2\0", and the next word must be 2 */
- if (size < 12 || memcmp (data, "GAB2\0\2\0", 5 + 2) != 0)
+ if (map.size < 12 || memcmp (map.data, "GAB2\0\2\0", 5 + 2) != 0)
goto wrong_magic_word;
/* read 'name' of subtitle */
- name_length = GST_READ_UINT32_LE (data + 5 + 2);
+ name_length = GST_READ_UINT32_LE (map.data + 5 + 2);
GST_LOG_OBJECT (sub, "length of name: %u", name_length);
- if (size <= 17 + name_length)
+ if (map.size <= 17 + name_length)
goto wrong_name_length;
- name_utf8 = g_convert ((gchar *) data + 11, name_length, "UTF-8", "UTF-16LE",
+ name_utf8 =
+ g_convert ((gchar *) map.data + 11, name_length, "UTF-8", "UTF-16LE",
NULL, NULL, NULL);
if (name_utf8) {
}
/* next word must be 4 */
- if (GST_READ_UINT16_LE (data + 11 + name_length) != 0x4)
+ if (GST_READ_UINT16_LE (map.data + 11 + name_length) != 0x4)
goto wrong_fixed_word_2;
- file_length = GST_READ_UINT32_LE (data + 13 + name_length);
+ file_length = GST_READ_UINT32_LE (map.data + 13 + name_length);
GST_LOG_OBJECT (sub, "length srt/ssa file: %u", file_length);
- if (size < (17 + name_length + file_length))
+ if (map.size < (17 + name_length + file_length))
goto wrong_total_length;
/* store this, so we can send it again after a seek; note that we shouldn't
if (sub->subfile == NULL)
goto extract_failed;
+ gst_buffer_unmap (buf, &map);
+
return GST_FLOW_OK;
/* ERRORS */
wrong_magic_word:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL), ("Wrong magic word"));
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_ERROR;
}
wrong_name_length:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
- ("name doesn't fit in buffer (%d < %d)", size, 17 + name_length));
+ ("name doesn't fit in buffer (%" G_GSIZE_FORMAT " < %d)", map.size,
+ 17 + name_length));
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_ERROR;
}
wrong_fixed_word_2:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
("wrong fixed word: expected %u, got %u", 4,
- GST_READ_UINT16_LE (data + 11 + name_length)));
+ GST_READ_UINT16_LE (map.data + 11 + name_length)));
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_ERROR;
}
wrong_total_length:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
- ("buffer size is wrong: need %d bytes, have %d bytes",
- 17 + name_length + file_length, size));
+ ("buffer size is wrong: need %d bytes, have %" G_GSIZE_FORMAT " bytes",
+ 17 + name_length + file_length, map.size));
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_ERROR;
}
extract_failed:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
("could not extract subtitles"));
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_ERROR;
}
}
static GstFlowReturn
-gst_avi_subtitle_chain (GstPad * pad, GstBuffer * buffer)
+gst_avi_subtitle_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
- GstAviSubtitle *sub = GST_AVI_SUBTITLE (GST_PAD_PARENT (pad));
+ GstAviSubtitle *sub = GST_AVI_SUBTITLE (parent);
GstFlowReturn ret;
if (sub->subfile != NULL) {
GST_WARNING_OBJECT (sub, "Got more buffers than expected, dropping");
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto done;
}
}
static void
-gst_avi_subtitle_base_init (gpointer klass)
+gst_avi_subtitle_class_init (GstAviSubtitleClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GST_DEBUG_CATEGORY_INIT (avisubtitle_debug, "avisubtitle", 0,
"parse avi subtitle stream");
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
-
- gst_element_class_set_details_simple (element_class,
- "Avi subtitle parser", "Codec/Parser/Subtitle",
- "Parse avi subtitle stream", "Thijs Vermeir <thijsvermeir@gmail.com>");
-}
-
-static void
-gst_avi_subtitle_class_init (GstAviSubtitleClass * klass)
-{
- GstElementClass *gstelement_class = (GstElementClass *) klass;
-
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_avi_subtitle_change_state);
gstelement_class->send_event =
GST_DEBUG_FUNCPTR (gst_avi_subtitle_send_event);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "Avi subtitle parser", "Codec/Parser/Subtitle",
+ "Parse avi subtitle stream", "Thijs Vermeir <thijsvermeir@gmail.com>");
}
static void
-gst_avi_subtitle_init (GstAviSubtitle * self, GstAviSubtitleClass * klass)
+gst_avi_subtitle_init (GstAviSubtitle * self)
{
GstCaps *caps;
static GstStaticPadTemplate cutter_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) { 8, 16 }, "
- "depth = (int) { 8, 16 }, " "signed = (boolean) true")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { " GST_AUDIO_NE (S8) "," GST_AUDIO_NE (S16) " }, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ], "
+ "layout = (string) interleaved")
);
static GstStaticPadTemplate cutter_sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) { 8, 16 }, "
- "depth = (int) { 8, 16 }, " "signed = (boolean) true")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { " GST_AUDIO_NE (S8) "," GST_AUDIO_NE (S16) " }, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ], "
+ "layout = (string) interleaved")
);
enum
PROP_LEAKY
};
-GST_BOILERPLATE (GstCutter, gst_cutter, GstElement, GST_TYPE_ELEMENT);
+#define gst_cutter_parent_class parent_class
+G_DEFINE_TYPE (GstCutter, gst_cutter, GST_TYPE_ELEMENT);
static void gst_cutter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_cutter_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstFlowReturn gst_cutter_chain (GstPad * pad, GstBuffer * buffer);
-
-static gboolean gst_cutter_get_caps (GstPad * pad, GstCutter * filter);
-
-static void
-gst_cutter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &cutter_src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &cutter_sink_factory);
- gst_element_class_set_details_simple (element_class, "Audio cutter",
- "Filter/Editor/Audio",
- "Audio Cutter to split audio into non-silent bits",
- "Thomas Vander Stichele <thomas at apestaart dot org>");
-}
+static gboolean gst_cutter_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_cutter_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
static void
gst_cutter_class_init (GstCutterClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
gobject_class->set_property = gst_cutter_set_property;
gobject_class->get_property = gst_cutter_get_property;
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
GST_DEBUG_CATEGORY_INIT (cutter_debug, "cutter", 0, "Audio cutting");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&cutter_src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&cutter_sink_factory));
+ gst_element_class_set_details_simple (element_class, "Audio cutter",
+ "Filter/Editor/Audio",
+ "Audio Cutter to split audio into non-silent bits",
+ "Thomas Vander Stichele <thomas at apestaart dot org>");
}
static void
-gst_cutter_init (GstCutter * filter, GstCutterClass * g_class)
+gst_cutter_init (GstCutter * filter)
{
filter->sinkpad =
gst_pad_new_from_static_template (&cutter_sink_factory, "sink");
+ gst_pad_set_chain_function (filter->sinkpad, gst_cutter_chain);
+ gst_pad_set_event_function (filter->sinkpad, gst_cutter_event);
+ gst_pad_use_fixed_caps (filter->sinkpad);
+ gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
+
filter->srcpad =
gst_pad_new_from_static_template (&cutter_src_factory, "src");
+ gst_pad_use_fixed_caps (filter->srcpad);
+ gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
filter->threshold_level = CUTTER_DEFAULT_THRESHOLD_LEVEL;
filter->threshold_length = CUTTER_DEFAULT_THRESHOLD_LENGTH;
filter->pre_run_length = 0 * GST_SECOND;
filter->pre_buffer = NULL;
filter->leaky = FALSE;
-
- gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
- gst_pad_set_chain_function (filter->sinkpad, gst_cutter_chain);
- gst_pad_use_fixed_caps (filter->sinkpad);
-
- gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
- gst_pad_use_fixed_caps (filter->srcpad);
}
static GstMessage *
DEFINE_CUTTER_CALCULATOR (gint16, 15);
DEFINE_CUTTER_CALCULATOR (gint8, 7);
+static gboolean
+gst_cutter_setcaps (GstCutter * filter, GstCaps * caps)
+{
+ GstAudioInfo info;
+
+ if (!gst_audio_info_from_caps (&info, caps))
+ return FALSE;
+
+ filter->info = info;
+
+ return gst_pad_set_caps (filter->srcpad, caps);
+}
+
+static gboolean
+gst_cutter_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ gboolean ret;
+ GstCutter *filter;
+
+ filter = GST_CUTTER (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ ret = gst_cutter_setcaps (filter, caps);
+ gst_event_unref (event);
+ break;
+ }
+ default:
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+ return ret;
+}
static GstFlowReturn
-gst_cutter_chain (GstPad * pad, GstBuffer * buf)
+gst_cutter_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
+ GstFlowReturn ret = GST_FLOW_OK;
GstCutter *filter;
+ GstMapInfo map;
gint16 *in_data;
+ gint bpf, rate;
+ gsize in_size;
guint num_samples;
gdouble NCS = 0.0; /* Normalized Cumulative Square of buffer */
gdouble RMS = 0.0; /* RMS of signal in buffer */
gdouble NMS = 0.0; /* Normalized Mean Square of buffer */
GstBuffer *prebuf; /* pointer to a prebuffer element */
+ GstClockTime duration;
- g_return_val_if_fail (pad != NULL, GST_FLOW_ERROR);
- g_return_val_if_fail (GST_IS_PAD (pad), GST_FLOW_ERROR);
- g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
+ filter = GST_CUTTER (parent);
- filter = GST_CUTTER (GST_OBJECT_PARENT (pad));
- g_return_val_if_fail (filter != NULL, GST_FLOW_ERROR);
- g_return_val_if_fail (GST_IS_CUTTER (filter), GST_FLOW_ERROR);
+ if (GST_AUDIO_INFO_FORMAT (&filter->info) == GST_AUDIO_FORMAT_UNKNOWN)
+ goto not_negotiated;
- if (!filter->have_caps) {
- if (!(gst_cutter_get_caps (pad, filter)))
- return GST_FLOW_NOT_NEGOTIATED;
- }
+ bpf = GST_AUDIO_INFO_BPF (&filter->info);
+ rate = GST_AUDIO_INFO_RATE (&filter->info);
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ in_data = (gint16 *) map.data;
+ in_size = map.size;
- in_data = (gint16 *) GST_BUFFER_DATA (buf);
GST_LOG_OBJECT (filter, "length of prerec buffer: %" GST_TIME_FORMAT,
GST_TIME_ARGS (filter->pre_run_length));
/* calculate mean square value on buffer */
- switch (filter->width) {
- case 16:
- num_samples = GST_BUFFER_SIZE (buf) / 2;
+ switch (GST_AUDIO_INFO_FORMAT (&filter->info)) {
+ case GST_AUDIO_FORMAT_S16:
+ num_samples = in_size / 2;
gst_cutter_calculate_gint16 (in_data, num_samples, &NCS);
NMS = NCS / num_samples;
break;
- case 8:
- num_samples = GST_BUFFER_SIZE (buf);
+ case GST_AUDIO_FORMAT_S8:
+ num_samples = in_size;
gst_cutter_calculate_gint8 ((gint8 *) in_data, num_samples, &NCS);
NMS = NCS / num_samples;
break;
default:
/* this shouldn't happen */
- g_warning ("no mean square function for width %d\n", filter->width);
+ g_warning ("no mean square function for format");
break;
}
+ gst_buffer_unmap (buf, &map);
+
filter->silent_prev = filter->silent;
+ duration = gst_util_uint64_scale (in_size / bpf, GST_SECOND, rate);
+
RMS = sqrt (NMS);
/* if RMS below threshold, add buffer length to silent run length count
* if not, reset
*/
GST_LOG_OBJECT (filter, "buffer stats: NMS %f, RMS %f, audio length %f", NMS,
- RMS,
- gst_guint64_to_gdouble (gst_audio_duration_from_pad_buffer
- (filter->sinkpad, buf)));
+ RMS, gst_guint64_to_gdouble (duration));
+
if (RMS < filter->threshold_level)
- filter->silent_run_length +=
- gst_guint64_to_gdouble (gst_audio_duration_from_pad_buffer
- (filter->sinkpad, buf));
+ filter->silent_run_length += gst_guint64_to_gdouble (duration);
else {
filter->silent_run_length = 0 * GST_SECOND;
filter->silent = FALSE;
/* first of all, flush current buffer */
GST_DEBUG_OBJECT (filter, "flushing buffer of length %" GST_TIME_FORMAT,
GST_TIME_ARGS (filter->pre_run_length));
+
while (filter->pre_buffer) {
prebuf = (g_list_first (filter->pre_buffer))->data;
filter->pre_buffer = g_list_remove (filter->pre_buffer, prebuf);
* or to the srcpad */
if (filter->silent) {
filter->pre_buffer = g_list_append (filter->pre_buffer, buf);
- filter->pre_run_length +=
- gst_guint64_to_gdouble (gst_audio_duration_from_pad_buffer
- (filter->sinkpad, buf));
+ filter->pre_run_length += gst_guint64_to_gdouble (duration);
+
while (filter->pre_run_length > filter->pre_length) {
+ GstClockTime pduration;
+ gsize psize;
+
prebuf = (g_list_first (filter->pre_buffer))->data;
g_assert (GST_IS_BUFFER (prebuf));
+
+ psize = gst_buffer_get_size (prebuf);
+ pduration = gst_util_uint64_scale (psize / bpf, GST_SECOND, rate);
+
filter->pre_buffer = g_list_remove (filter->pre_buffer, prebuf);
- filter->pre_run_length -=
- gst_guint64_to_gdouble (gst_audio_duration_from_pad_buffer
- (filter->sinkpad, prebuf));
+ filter->pre_run_length -= gst_guint64_to_gdouble (pduration);
+
/* only pass buffers if we don't leak */
if (!filter->leaky)
- gst_pad_push (filter->srcpad, prebuf);
+ ret = gst_pad_push (filter->srcpad, prebuf);
else
gst_buffer_unref (prebuf);
}
} else
- gst_pad_push (filter->srcpad, buf);
+ ret = gst_pad_push (filter->srcpad, buf);
- return GST_FLOW_OK;
-}
+ return ret;
-
-static gboolean
-gst_cutter_get_caps (GstPad * pad, GstCutter * filter)
-{
- GstCaps *caps;
- GstStructure *structure;
-
- caps = gst_pad_get_caps (pad);
- if (!caps) {
- GST_INFO ("no caps on pad %s:%s", GST_DEBUG_PAD_NAME (pad));
- return FALSE;
+ /* ERRORS */
+not_negotiated:
+ {
+ return GST_FLOW_NOT_NEGOTIATED;
}
- structure = gst_caps_get_structure (caps, 0);
- gst_structure_get_int (structure, "width", &filter->width);
- filter->max_sample = 1 << (filter->width - 1); /* signed */
- filter->have_caps = TRUE;
-
- gst_caps_unref (caps);
- return TRUE;
}
-
static void
gst_cutter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
double threshold_level; /* level below which to cut */
double threshold_length; /* how long signal has to remain
* below this level before cutting */
-
double silent_run_length; /* how long has it been below threshold ? */
gboolean silent;
gboolean silent_prev;
GList *pre_buffer; /* list of GstBuffers in pre-record buffer */
gboolean leaky; /* do we leak an overflowing prebuffer ? */
- gboolean have_caps; /* did we get the needed caps yet ? */
- gint width; /* bit width of data */
- long max_sample; /* maximum sample value */
+ GstAudioInfo info;
};
struct _GstCutterClass
if GST_HAVE_MMAP
-EFENCE_PLUGIN=libgstefence.la
-else
EFENCE_PLUGIN=
+else
+EFENCE_PLUGIN=libgstefence.la
endif
plugin_LTLIBRARIES = $(EFENCE_PLUGIN) libgstdebug.la libgstnavigationtest.la
libgstdebug_la_SOURCES = \
gstdebug.c \
breakmydata.c \
- gstcapsdebug.c \
gstcapssetter.c \
gstnavseek.c \
gstpushfilesrc.c \
cpureport.c \
testplugin.c
+# gstcapsdebug.c
+
libgstdebug_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS)
libgstdebug_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS)
libgstdebug_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
#define GST_IS_BREAK_MY_DATA_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BREAK_MY_DATA))
+GType gst_break_my_data_get_type (void);
+
enum
{
ARG_0,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
-
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_break_my_data_debug, "breakmydata", 0, \
- "debugging category for breakmydata element");
-
-GType gst_break_my_data_get_type (void);
-GST_BOILERPLATE_FULL (GstBreakMyData, gst_break_my_data, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);
+#define gst_break_my_data_parent_class parent_class
+G_DEFINE_TYPE (GstBreakMyData, gst_break_my_data, GST_TYPE_BASE_TRANSFORM);
static void
-gst_break_my_data_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (gstelement_class,
- &bmd_sink_template);
- gst_element_class_add_static_pad_template (gstelement_class,
- &bmd_src_template);
-
- gst_element_class_set_details_simple (gstelement_class, "Break my data",
- "Testing",
- "randomly change data in the stream", "Benjamin Otte <otte@gnome>");
-}
-
-static void
gst_break_my_data_class_init (GstBreakMyDataClass * klass)
{
GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *gstelement_class;
GObjectClass *gobject_class;
gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (gst_break_my_data_debug, "breakmydata", 0,
+ "debugging category for breakmydata element");
+
gobject_class->set_property = gst_break_my_data_set_property;
gobject_class->get_property = gst_break_my_data_get_property;
"probability for each byte in the buffer to be changed", 0.0, 1.0,
0.0, G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&bmd_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&bmd_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "Break my data",
+ "Testing",
+ "randomly change data in the stream", "Benjamin Otte <otte@gnome>");
+
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_break_my_data_transform_ip);
gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_break_my_data_start);
}
static void
-gst_break_my_data_init (GstBreakMyData * bmd, GstBreakMyDataClass * g_class)
+gst_break_my_data_init (GstBreakMyData * bmd)
{
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (bmd), TRUE);
}
gst_break_my_data_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
GstBreakMyData *bmd = GST_BREAK_MY_DATA (trans);
- guint i, size;
+ GstMapInfo map;
+ gsize i;
g_return_val_if_fail (gst_buffer_is_writable (buf), GST_FLOW_ERROR);
i = 0;
}
- size = GST_BUFFER_SIZE (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
GST_LOG_OBJECT (bmd,
- "got buffer %p (size %u, timestamp %" G_GUINT64_FORMAT ", offset %"
- G_GUINT64_FORMAT "", buf, size, GST_BUFFER_TIMESTAMP (buf),
- GST_BUFFER_OFFSET (buf));
+ "got buffer %p (size %" G_GSIZE_FORMAT ", timestamp %" G_GUINT64_FORMAT
+ ", offset %" G_GUINT64_FORMAT "", buf, map.size,
+ GST_BUFFER_TIMESTAMP (buf), GST_BUFFER_OFFSET (buf));
- for (; i < size; i++) {
+ for (; i < map.size; i++) {
if (g_rand_double_range (bmd->rand, 0, 1.0) <= bmd->probability) {
guint8 new;
} else {
new = bmd->set;
}
- GST_INFO_OBJECT (bmd, "changing byte %u from 0x%02X to 0x%02X", i,
- (guint) GST_READ_UINT8 (GST_BUFFER_DATA (buf) + i),
- (guint) ((guint8) new));
- GST_BUFFER_DATA (buf)[i] = new;
+ GST_INFO_OBJECT (bmd,
+ "changing byte %" G_GSIZE_FORMAT " from 0x%02X to 0x%02X", i,
+ (guint) GST_READ_UINT8 (map.data + i), (guint) ((guint8) new));
+ map.data[i] = new;
}
}
/* don't overflow */
- bmd->skipped += MIN (G_MAXUINT - bmd->skipped, GST_BUFFER_SIZE (buf));
+ bmd->skipped += MIN (G_MAXUINT - bmd->skipped, map.size);
+
+ gst_buffer_unmap (buf, &map);
GST_OBJECT_UNLOCK (bmd);
static gboolean gst_cpu_report_start (GstBaseTransform * trans);
static gboolean gst_cpu_report_stop (GstBaseTransform * trans);
-GST_BOILERPLATE (GstCpuReport, gst_cpu_report, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
-
-static void
-gst_cpu_report_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &cpu_report_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &cpu_report_src_template);
-
- gst_element_class_set_details_simple (element_class, "CPU report",
- "Testing",
- "Post cpu usage information every buffer",
- "Zaheer Abbas Merali <zaheerabbas at merali dot org>");
-}
+#define gst_cpu_report_parent_class parent_class
+G_DEFINE_TYPE (GstCpuReport, gst_cpu_report, GST_TYPE_BASE_TRANSFORM);
static void
gst_cpu_report_finalize (GObject * obj)
gst_cpu_report_class_init (GstCpuReportClass * g_class)
{
GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *element_class;
GObjectClass *gobject_class;
gobject_class = G_OBJECT_CLASS (g_class);
+ element_class = GST_ELEMENT_CLASS (g_class);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (g_class);
gobject_class->finalize = gst_cpu_report_finalize;
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&cpu_report_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&cpu_report_src_template));
+
+ gst_element_class_set_details_simple (element_class, "CPU report",
+ "Testing",
+ "Post cpu usage information every buffer",
+ "Zaheer Abbas Merali <zaheerabbas at merali dot org>");
+
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_cpu_report_transform_ip);
gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_cpu_report_start);
}
static void
-gst_cpu_report_init (GstCpuReport * report, GstCpuReportClass * g_class)
+gst_cpu_report_init (GstCpuReport * report)
{
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (report), TRUE);
static GstElementClass *parent_class = NULL;
-typedef struct _GstFencedBuffer GstFencedBuffer;
-struct _GstFencedBuffer
+typedef struct _GstMetaFenced
{
- GstBuffer buffer;
+ GstMeta meta;
+
void *region;
unsigned int length;
-};
+} GstMetaFenced;
+
+static const GstMetaInfo *
+gst_meta_fenced_get_info (void)
+{
+ static const GstMetaInfo *meta_fenced_info = NULL;
+
+ if (meta_fenced_info == NULL) {
+ meta_fenced_info = gst_meta_register ("GstMetaFenced", "GstMetaFenced",
+ sizeof (GstMetaFenced),
+ (GstMetaInitFunction) NULL,
+ (GstMetaFreeFunction) NULL,
+ (GstMetaCopyFunction) NULL, (GstMetaTransformFunction) NULL);
+ }
+ return meta_fenced_info;
+}
-GType gst_fenced_buffer_get_type (void);
-static void gst_fenced_buffer_finalize (GstFencedBuffer * buf);
-static GstFencedBuffer *gst_fenced_buffer_copy (const GstBuffer * buffer);
+#define GST_META_FENCED_GET(buf) ((GstMetaFenced *)gst_buffer_get_meta(buf,gst_meta_fenced_get_info()))
+#define GST_META_FENCED_ADD(buf) ((GstMetaFenced *)gst_buffer_add_meta(buf,gst_meta_fenced_get_info(),NULL))
+
+static void gst_fenced_buffer_dispose (GstBuffer * buf);
+static GstBuffer *gst_fenced_buffer_copy (const GstBuffer * buffer);
static void *gst_fenced_buffer_alloc (GstBuffer * buffer, unsigned int length,
gboolean fence_top);
+#if 0
static GstFlowReturn gst_efence_buffer_alloc (GstPad * pad, guint64 offset,
guint size, GstCaps * caps, GstBuffer ** buf);
-
-#define GST_TYPE_FENCED_BUFFER (gst_fenced_buffer_get_type())
-
-#define GST_IS_FENCED_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_FENCED_BUFFER))
-#define GST_FENCED_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_FENCED_BUFFER, GstFencedBuffer))
+#endif
GType
gst_gst_efence_get_type (void)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class,
- &gst_efence_sink_factory);
- gst_element_class_add_static_pad_template (element_class,
- &gst_efence_src_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_efence_sink_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_efence_src_factory));
gst_element_class_set_details_simple (element_class, "Electric Fence",
"Testing",
"This element converts a stream of normal GStreamer buffers into a "
GST_DEBUG_FUNCPTR (gst_pad_proxy_setcaps));
gst_pad_set_chain_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_efence_chain));
+#if 0
gst_pad_set_bufferalloc_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_efence_buffer_alloc));
+#endif
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
filter->srcpad =
efence = GST_EFENCE (GST_OBJECT_PARENT (pad));
g_return_val_if_fail (GST_IS_EFENCE (efence), GST_FLOW_ERROR);
+#if 0
if (GST_IS_FENCED_BUFFER (buffer)) {
GST_DEBUG_OBJECT (efence, "Passing on existing fenced buffer with caps %"
GST_PTR_FORMAT, GST_BUFFER_CAPS (buffer));
return gst_pad_push (efence->srcpad, buffer);
}
+#endif
copy = (GstBuffer *) gst_fenced_buffer_copy (buffer);
return gst_pad_activate_pull (efence->sinkpad, active);
}
+#if 0
static GstFlowReturn
gst_efence_buffer_alloc (GstPad * pad, guint64 offset,
guint size, GstCaps * caps, GstBuffer ** buf)
return GST_FLOW_OK;
}
+#endif
static void
gst_efence_set_property (GObject * object, guint prop_id,
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
-static GstBufferClass *fenced_buffer_parent_class = NULL;
-
static void
-gst_fenced_buffer_finalize (GstFencedBuffer * buffer)
+gst_fenced_buffer_dispose (GstBuffer * buffer)
{
- GstFencedBuffer *fenced_buffer;
+ GstMetaFenced *meta;
- GST_DEBUG ("free buffer=%p", buffer);
+ meta = GST_META_FENCED_GET (buffer);
- fenced_buffer = GST_FENCED_BUFFER (buffer);
+ GST_DEBUG ("free buffer=%p", buffer);
/* free our data */
if (GST_BUFFER_DATA (buffer)) {
- GST_DEBUG ("free region %p %d", fenced_buffer->region,
- fenced_buffer->length);
- munmap (fenced_buffer->region, fenced_buffer->length);
+ GST_DEBUG ("free region %p %d", meta->region, meta->length);
+ munmap (meta->region, meta->length);
}
-
- GST_MINI_OBJECT_CLASS (fenced_buffer_parent_class)->finalize (GST_MINI_OBJECT
- (buffer));
}
-static GstFencedBuffer *
+static GstBuffer *
gst_fenced_buffer_copy (const GstBuffer * buffer)
{
GstBuffer *copy;
g_return_val_if_fail (buffer != NULL, NULL);
/* create a fresh new buffer */
- copy = (GstBuffer *) gst_mini_object_new (GST_TYPE_FENCED_BUFFER);
+ copy = gst_buffer_new ();
/* we simply copy everything from our parent */
- ptr = gst_fenced_buffer_alloc (GST_BUFFER (copy),
- GST_BUFFER_SIZE (buffer), TRUE);
+ ptr = gst_fenced_buffer_alloc (copy, GST_BUFFER_SIZE (buffer), TRUE);
memcpy (ptr, GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
/* copy relevant flags */
", caps: %" GST_PTR_FORMAT, buffer,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (copy)), GST_BUFFER_CAPS (copy));
- return GST_FENCED_BUFFER (copy);
+ return copy;
}
void *
{
int alloc_size;
void *region;
- GstFencedBuffer *fenced_buffer = (GstFencedBuffer *) buffer;
+ GstMetaFenced *meta;
int page_size;
GST_DEBUG ("buffer=%p length=%d fence_top=%d", buffer, length, fence_top);
g_warning ("mmap failed");
return NULL;
}
+
+ GST_MINI_OBJECT_CAST (buffer)->dispose =
+ (GstMiniObjectDisposeFunction) gst_fenced_buffer_dispose;
+ GST_MINI_OBJECT_CAST (buffer)->copy =
+ (GstMiniObjectCopyFunction) gst_fenced_buffer_copy;
+
+ meta = GST_META_FENCED_ADD (buffer);
+
#if 0
munmap (region, page_size);
munmap (region + alloc_size - page_size, page_size);
- fenced_buffer->region = region + page_size;
- fenced_buffer->length = alloc_size - page_size;
+ meta->region = region + page_size;
+ meta->length = alloc_size - page_size;
#else
mprotect (region, page_size, PROT_NONE);
mprotect ((char *) region + alloc_size - page_size, page_size, PROT_NONE);
- fenced_buffer->region = region;
- fenced_buffer->length = alloc_size;
+ meta->region = region;
+ meta->length = alloc_size;
#endif
- GST_DEBUG ("new region %p %d", fenced_buffer->region, fenced_buffer->length);
+ GST_DEBUG ("new region %p %d", meta->region, meta->length);
if (fence_top) {
int offset;
return (void *) ((char *) region + page_size);
}
}
-
-static void
-gst_fenced_buffer_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
- fenced_buffer_parent_class = g_type_class_peek_parent (g_class);
-
- mini_object_class->finalize =
- (GstMiniObjectFinalizeFunction) gst_fenced_buffer_finalize;
- mini_object_class->copy = (GstMiniObjectCopyFunction) gst_fenced_buffer_copy;
-}
-
-GType
-gst_fenced_buffer_get_type (void)
-{
- static GType fenced_buf_type = 0;
-
- if (G_UNLIKELY (!fenced_buf_type)) {
- static const GTypeInfo fenced_buf_info = {
- sizeof (GstBufferClass),
- NULL,
- NULL,
- (GClassInitFunc) gst_fenced_buffer_class_init,
- NULL,
- NULL,
- sizeof (GstFencedBuffer),
- 0,
- NULL,
- };
-
- fenced_buf_type = g_type_register_static (GST_TYPE_BUFFER,
- "GstFencedBuffer", &fenced_buf_info, 0);
- }
- return fenced_buf_type;
-}
/* class initialization */
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_caps_debug_debug, "capsdebug", 0, \
- "debug category for capsdebug element");
-
-GST_BOILERPLATE_FULL (GstCapsDebug, gst_caps_debug, GstElement,
- GST_TYPE_ELEMENT, DEBUG_INIT);
-
-static void
-gst_caps_debug_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_caps_debug_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_caps_debug_sink_template);
-
- gst_element_class_set_details_simple (element_class, "Caps debug",
- "Generic", "Debug caps negotiation", "David Schleef <ds@schleef.org>");
-}
+#define gst_caps_debug_parent_class parent_class
+G_DEFINE_TYPE (GstCapsDebug, gst_caps_debug, GST_TYPE_ELEMENT);
static void
gst_caps_debug_class_init (GstCapsDebugClass * klass)
gobject_class->finalize = gst_caps_debug_finalize;
element_class->change_state = GST_DEBUG_FUNCPTR (gst_caps_debug_change_state);
+ GST_DEBUG_CATEGORY_INIT (gst_caps_debug_debug, "capsdebug", 0,
+ "debug category for capsdebug element");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_caps_debug_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_caps_debug_sink_template));
+
+ gst_element_class_set_details_simple (element_class, "Caps debug",
+ "Generic", "Debug caps negotiation", "David Schleef <ds@schleef.org>");
}
static void
-gst_caps_debug_init (GstCapsDebug * capsdebug,
- GstCapsDebugClass * capsdebug_class)
+gst_caps_debug_init (GstCapsDebug * capsdebug)
{
capsdebug->srcpad =
static gboolean gst_caps_setter_transform_size (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps, guint size,
- GstCaps * othercaps, guint * othersize);
+ GstPadDirection direction, GstCaps * caps, gsize size,
+ GstCaps * othercaps, gsize * othersize);
static GstCaps *gst_caps_setter_transform_caps (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps);
+ GstPadDirection direction, GstCaps * caps, GstCaps * cfilter);
static GstFlowReturn gst_caps_setter_transform_ip (GstBaseTransform * btrans,
GstBuffer * in);
static void gst_caps_setter_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstCapsSetter, gst_caps_setter, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
-
-static void
-gst_caps_setter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "CapsSetter",
- "Generic",
- "Set/merge caps on stream",
- "Mark Nauwelaerts <mnauw@users.sourceforge.net>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_caps_setter_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_caps_setter_src_template);
-}
+#define gst_caps_setter_parent_class parent_class
+G_DEFINE_TYPE (GstCapsSetter, gst_caps_setter, GST_TYPE_BASE_TRANSFORM);
static void
gst_caps_setter_class_init (GstCapsSetterClass * g_class)
{
GObjectClass *gobject_class = (GObjectClass *) g_class;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) g_class;
GST_DEBUG_CATEGORY_INIT (caps_setter_debug, "capssetter", 0, "capssetter");
"Drop fields of incoming caps", DEFAULT_REPLACE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (element_class, "CapsSetter",
+ "Generic",
+ "Set/merge caps on stream",
+ "Mark Nauwelaerts <mnauw@users.sourceforge.net>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_caps_setter_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_caps_setter_src_template));
+
trans_class->transform_size =
GST_DEBUG_FUNCPTR (gst_caps_setter_transform_size);
trans_class->transform_caps =
}
static void
-gst_caps_setter_init (GstCapsSetter * filter, GstCapsSetterClass * g_class)
+gst_caps_setter_init (GstCapsSetter * filter)
{
filter->caps = gst_caps_new_any ();
filter->join = DEFAULT_JOIN;
static gboolean
gst_caps_setter_transform_size (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps, guint size,
- GstCaps * othercaps, guint * othersize)
+ GstPadDirection direction, GstCaps * caps, gsize size,
+ GstCaps * othercaps, gsize * othersize)
{
*othersize = size;
static GstCaps *
gst_caps_setter_transform_caps (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * cfilter)
{
GstCapsSetter *filter = GST_CAPS_SETTER (trans);
GstCaps *ret, *filter_caps;
#include <gst/gst.h>
GType gst_break_my_data_get_type (void);
-GType gst_caps_debug_get_type (void);
+//GType gst_caps_debug_get_type (void);
GType gst_caps_setter_get_type (void);
GType gst_rnd_buffer_size_get_type (void);
GType gst_navseek_get_type (void);
gst_tag_inject_get_type ())
|| !gst_element_register (plugin, "testsink", GST_RANK_NONE,
gst_test_get_type ())
+#if 0
|| !gst_element_register (plugin, "capsdebug", GST_RANK_NONE,
gst_caps_debug_get_type ())
+#endif
|| !gst_element_register (plugin, "cpureport", GST_RANK_NONE,
gst_cpu_report_get_type ()))
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
);
static GstStaticPadTemplate gst_navigationtest_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
);
-static GstVideoFilterClass *parent_class = NULL;
+#define gst_navigationtest_parent_class parent_class
+G_DEFINE_TYPE (GstNavigationtest, gst_navigationtest, GST_TYPE_VIDEO_FILTER);
static gboolean
-gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event)
+gst_navigationtest_src_event (GstBaseTransform * trans, GstEvent * event)
{
+ GstVideoInfo *info;
GstNavigationtest *navtest;
const gchar *type;
- navtest = GST_NAVIGATIONTEST (GST_PAD_PARENT (pad));
+ navtest = GST_NAVIGATIONTEST (trans);
+
+ info = &GST_VIDEO_FILTER (trans)->in_info;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_NAVIGATION:
const GstStructure *s = gst_event_get_structure (event);
gint fps_n, fps_d;
- fps_n = gst_value_get_fraction_numerator ((&navtest->framerate));
- fps_d = gst_value_get_fraction_denominator ((&navtest->framerate));
+ fps_n = GST_VIDEO_INFO_FPS_N (info);
+ fps_d = GST_VIDEO_INFO_FPS_D (info);
type = gst_structure_get_string (s, "event");
if (g_str_equal (type, "mouse-move")) {
default:
break;
}
- return gst_pad_event_default (pad, event);
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);
}
/* Useful macros */
#define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
-static gboolean
-gst_navigationtest_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
- guint * size)
-{
- GstNavigationtest *navtest;
- GstStructure *structure;
- gboolean ret = FALSE;
- gint width, height;
-
- navtest = GST_NAVIGATIONTEST (btrans);
-
- structure = gst_caps_get_structure (caps, 0);
-
- if (gst_structure_get_int (structure, "width", &width) &&
- gst_structure_get_int (structure, "height", &height)) {
- *size = GST_VIDEO_I420_SIZE (width, height);
- ret = TRUE;
- GST_DEBUG_OBJECT (navtest, "our frame size is %d bytes (%dx%d)", *size,
- width, height);
- }
-
- return ret;
-}
-
-static gboolean
-gst_navigationtest_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
-{
- GstNavigationtest *navtest = GST_NAVIGATIONTEST (btrans);
- gboolean ret = FALSE;
- GstStructure *structure;
-
- structure = gst_caps_get_structure (incaps, 0);
-
- if (gst_structure_get_int (structure, "width", &navtest->width) &&
- gst_structure_get_int (structure, "height", &navtest->height)) {
- const GValue *framerate;
-
- framerate = gst_structure_get_value (structure, "framerate");
- if (framerate && GST_VALUE_HOLDS_FRACTION (framerate)) {
- g_value_copy (framerate, &navtest->framerate);
- ret = TRUE;
- }
- }
-
- return ret;
-}
-
static void
-draw_box_planar411 (guint8 * dest, int width, int height, int x, int y,
+draw_box_planar411 (GstVideoFrame * frame, int x, int y,
guint8 colory, guint8 coloru, guint8 colorv)
{
+ gint width, height;
int x1, x2, y1, y2;
- guint8 *d = dest;
+ guint8 *d;
+ gint stride;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
if (x < 0 || y < 0 || x >= width || y >= height)
return;
y1 = MAX (y - 5, 0);
y2 = MIN (y + 5, height);
+ d = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
for (y = y1; y < y2; y++) {
for (x = x1; x < x2; x++) {
- ((guint8 *) d)[y * GST_VIDEO_I420_Y_ROWSTRIDE (width) + x] = colory;
+ d[y * stride + x] = colory;
}
}
- d = dest + GST_VIDEO_I420_U_OFFSET (width, height);
+ d = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
+
x1 /= 2;
x2 /= 2;
y1 /= 2;
y2 /= 2;
for (y = y1; y < y2; y++) {
for (x = x1; x < x2; x++) {
- ((guint8 *) d)[y * GST_VIDEO_I420_U_ROWSTRIDE (width) + x] = coloru;
+ d[y * stride + x] = coloru;
}
}
- d = dest + GST_VIDEO_I420_V_OFFSET (width, height);
+ d = GST_VIDEO_FRAME_PLANE_DATA (frame, 2);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 2);
+
for (y = y1; y < y2; y++) {
for (x = x1; x < x2; x++) {
- ((guint8 *) d)[y * GST_VIDEO_I420_V_ROWSTRIDE (width) + x] = colorv;
+ d[y * stride + x] = colorv;
}
}
}
static GstFlowReturn
-gst_navigationtest_transform (GstBaseTransform * trans, GstBuffer * in,
- GstBuffer * out)
+gst_navigationtest_transform_frame (GstVideoFilter * filter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
- GstNavigationtest *navtest = GST_NAVIGATIONTEST (trans);
+ GstNavigationtest *navtest = GST_NAVIGATIONTEST (filter);
GSList *walk;
- GstFlowReturn ret = GST_FLOW_OK;
-
- /* do something interesting here. This simply copies the source
- * to the destination. */
- gst_buffer_copy_metadata (out, in, GST_BUFFER_COPY_TIMESTAMPS);
- memcpy (GST_BUFFER_DATA (out), GST_BUFFER_DATA (in),
- MIN (GST_BUFFER_SIZE (in), GST_BUFFER_SIZE (out)));
+ gst_video_frame_copy (out_frame, in_frame);
walk = navtest->clicks;
while (walk) {
ButtonClick *click = walk->data;
walk = g_slist_next (walk);
- draw_box_planar411 (GST_BUFFER_DATA (out), navtest->width, navtest->height,
+ draw_box_planar411 (out_frame,
rint (click->x), rint (click->y), click->cy, click->cu, click->cv);
if (--click->images_left < 1) {
navtest->clicks = g_slist_remove (navtest->clicks, click);
g_free (click);
}
}
- draw_box_planar411 (GST_BUFFER_DATA (out), navtest->width, navtest->height,
+ draw_box_planar411 (out_frame,
rint (navtest->x), rint (navtest->y), 0, 128, 128);
- return ret;
+ return GST_FLOW_OK;
}
static GstStateChangeReturn
}
static void
-gst_navigationtest_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video navigation test",
- "Filter/Effect/Video",
- "Handle navigation events showing a black square following mouse pointer",
- "David Schleef <ds@schleef.org>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_navigationtest_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_navigationtest_src_template);
-}
-
-static void
-gst_navigationtest_class_init (gpointer klass, gpointer class_data)
+gst_navigationtest_class_init (GstNavigationtestClass * klass)
{
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
+ GstVideoFilterClass *vfilter_class;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
-
- parent_class = g_type_class_peek_parent (klass);
+ vfilter_class = (GstVideoFilterClass *) klass;
element_class->change_state =
GST_DEBUG_FUNCPTR (gst_navigationtest_change_state);
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_navigationtest_set_caps);
- trans_class->get_unit_size =
- GST_DEBUG_FUNCPTR (gst_navigationtest_get_unit_size);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_navigationtest_transform);
-}
+ gst_element_class_set_details_simple (element_class, "Video navigation test",
+ "Filter/Effect/Video",
+ "Handle navigation events showing a black square following mouse pointer",
+ "David Schleef <ds@schleef.org>");
-static void
-gst_navigationtest_init (GTypeInstance * instance, gpointer g_class)
-{
- GstNavigationtest *navtest = GST_NAVIGATIONTEST (instance);
- GstBaseTransform *btrans = GST_BASE_TRANSFORM (instance);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_navigationtest_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_navigationtest_src_template));
- gst_pad_set_event_function (btrans->srcpad,
- GST_DEBUG_FUNCPTR (gst_navigationtest_handle_src_event));
+ trans_class->src_event = GST_DEBUG_FUNCPTR (gst_navigationtest_src_event);
- navtest->x = -1;
- navtest->y = -1;
- g_value_init (&navtest->framerate, GST_TYPE_FRACTION);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_navigationtest_transform_frame);
}
-GType
-gst_navigationtest_get_type (void)
+static void
+gst_navigationtest_init (GstNavigationtest * navtest)
{
- static GType navigationtest_type = 0;
-
- if (!navigationtest_type) {
- static const GTypeInfo navigationtest_info = {
- sizeof (GstNavigationtestClass),
- gst_navigationtest_base_init,
- NULL,
- gst_navigationtest_class_init,
- NULL,
- NULL,
- sizeof (GstNavigationtest),
- 0,
- gst_navigationtest_init,
- };
-
- navigationtest_type = g_type_register_static (GST_TYPE_VIDEO_FILTER,
- "GstNavigationtest", &navigationtest_info, 0);
- }
- return navigationtest_type;
+ navtest->x = -1;
+ navtest->y = -1;
}
static gboolean
#ifndef __GST_NAVIGATIONTEST_H__
#define __GST_NAVIGATIONTEST_H__
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
{
GstVideoFilter videofilter;
- gint width, height;
-
- GValue framerate;
gdouble x, y;
-
GSList *clicks;
};
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
-static gboolean gst_navseek_event (GstBaseTransform * trans, GstEvent * event);
+static gboolean gst_navseek_sink_event (GstBaseTransform * trans,
+ GstEvent * event);
static GstFlowReturn gst_navseek_transform_ip (GstBaseTransform * basetrans,
GstBuffer * buf);
-static gboolean gst_navseek_handle_src_event (GstPad * pad, GstEvent * event);
+static gboolean gst_navseek_src_event (GstBaseTransform * trans,
+ GstEvent * event);
static gboolean gst_navseek_stop (GstBaseTransform * trans);
static gboolean gst_navseek_start (GstBaseTransform * trans);
GValue * value, GParamSpec * pspec);
GType gst_navseek_get_type (void);
-GST_BOILERPLATE (GstNavSeek, gst_navseek, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
-
-static void
-gst_navseek_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &navseek_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &navseek_src_template);
-
- gst_element_class_set_details_simple (element_class,
- "Seek based on left-right arrows", "Filter/Video",
- "Seek based on navigation keys left-right",
- "Jan Schmidt <thaytan@mad.scientist.com>");
-}
+#define gst_navseek_parent_class parent_class
+G_DEFINE_TYPE (GstNavSeek, gst_navseek, GST_TYPE_BASE_TRANSFORM);
static void
gst_navseek_class_init (GstNavSeekClass * klass)
{
GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *element_class;
GObjectClass *gobject_class;
gobject_class = G_OBJECT_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (klass);
gobject_class->set_property = gst_navseek_set_property;
"Time in seconds to seek by", 0.0, G_MAXDOUBLE, 5.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gstbasetrans_class->event = GST_DEBUG_FUNCPTR (gst_navseek_event);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&navseek_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&navseek_src_template));
+
+ gst_element_class_set_details_simple (element_class,
+ "Seek based on left-right arrows", "Filter/Video",
+ "Seek based on navigation keys left-right",
+ "Jan Schmidt <thaytan@mad.scientist.com>");
+
+ gstbasetrans_class->src_event = GST_DEBUG_FUNCPTR (gst_navseek_src_event);
+ gstbasetrans_class->sink_event = GST_DEBUG_FUNCPTR (gst_navseek_sink_event);
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_navseek_transform_ip);
gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_navseek_start);
}
static void
-gst_navseek_init (GstNavSeek * navseek, GstNavSeekClass * g_class)
+gst_navseek_init (GstNavSeek * navseek)
{
- gst_pad_set_event_function (GST_BASE_TRANSFORM (navseek)->srcpad,
- GST_DEBUG_FUNCPTR (gst_navseek_handle_src_event));
-
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (navseek), TRUE);
navseek->seek_offset = 5.0;
static void
gst_navseek_seek (GstNavSeek * navseek, gint64 offset)
{
- GstFormat peer_format = GST_FORMAT_TIME;
gboolean ret;
GstPad *peer_pad;
gint64 peer_value;
/* Query for the current time then attempt to set to time + offset */
peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad);
- ret = gst_pad_query_position (peer_pad, &peer_format, &peer_value);
+ ret = gst_pad_query_position (peer_pad, GST_FORMAT_TIME, &peer_value);
- if (ret && peer_format == GST_FORMAT_TIME) {
+ if (ret) {
GstEvent *event;
peer_value += offset;
static void
gst_navseek_change_playback_rate (GstNavSeek * navseek, gdouble rate)
{
- GstFormat peer_format = GST_FORMAT_TIME;
gboolean ret;
GstPad *peer_pad;
gint64 current_position;
peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad);
- ret = gst_pad_query_position (peer_pad, &peer_format, ¤t_position);
+ ret = gst_pad_query_position (peer_pad, GST_FORMAT_TIME, ¤t_position);
- if (ret && peer_format == GST_FORMAT_TIME) {
+ if (ret) {
GstEvent *event;
gint64 start;
gint64 stop;
}
static gboolean
-gst_navseek_handle_src_event (GstPad * pad, GstEvent * event)
+gst_navseek_src_event (GstBaseTransform * trans, GstEvent * event)
{
GstNavSeek *navseek;
gboolean ret = TRUE;
- navseek = GST_NAVSEEK (GST_PAD_PARENT (pad));
+ navseek = GST_NAVSEEK (trans);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_NAVIGATION:
- /* Check for a keyup and convert left/right to a seek event */
{
+ /* Check for a keyup and convert left/right to a seek event */
const GstStructure *structure;
const gchar *event_type;
}
gst_event_unref (event);
event = NULL;
- }
break;
+ }
default:
break;
}
- if (event && GST_PAD_IS_LINKED (GST_BASE_TRANSFORM (navseek)->sinkpad)) {
- GstPad *peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad);
-
- ret = gst_pad_send_event (peer_pad, event);
- gst_object_unref (peer_pad);
- }
+ if (event)
+ ret = GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);
return ret;
}
}
static gboolean
-gst_navseek_event (GstBaseTransform * trans, GstEvent * event)
+gst_navseek_sink_event (GstBaseTransform * trans, GstEvent * event)
{
GstNavSeek *navseek = GST_NAVSEEK (trans);
default:
break;
}
- return GST_BASE_TRANSFORM_CLASS (parent_class)->event (trans, event);
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
}
static GstFlowReturn
static void gst_push_file_src_uri_handler_init (gpointer g_iface,
gpointer iface_data);
-static void gst_file_push_src_add_uri_handler (GType type);
-GST_BOILERPLATE_FULL (GstPushFileSrc, gst_push_file_src, GstBin, GST_TYPE_BIN,
- gst_file_push_src_add_uri_handler);
-
-static void
-gst_file_push_src_add_uri_handler (GType type)
-{
- static const GInterfaceInfo info = {
- gst_push_file_src_uri_handler_init,
- NULL,
- NULL
- };
-
- g_type_add_interface_static (type, GST_TYPE_URI_HANDLER, &info);
- GST_DEBUG_CATEGORY_INIT (pushfilesrc_debug, "pushfilesrc", 0,
- "pushfilesrc element");
-}
-
-static void
-gst_push_file_src_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &srctemplate);
-
- gst_element_class_set_details_simple (element_class, "Push File Source",
- "Testing",
- "Implements pushfile:// URI-handler for push-based file access",
- "Tim-Philipp Müller <tim centricular net>");
-}
+#define gst_push_file_src_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPushFileSrc, gst_push_file_src, GST_TYPE_BIN,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER,
+ gst_push_file_src_uri_handler_init));
static void
gst_push_file_src_dispose (GObject * obj)
gst_push_file_src_class_init (GstPushFileSrcClass * g_class)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
gobject_class = G_OBJECT_CLASS (g_class);
+ element_class = GST_ELEMENT_CLASS (g_class);
+
+ GST_DEBUG_CATEGORY_INIT (pushfilesrc_debug, "pushfilesrc", 0,
+ "pushfilesrc element");
gobject_class->dispose = gst_push_file_src_dispose;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&srctemplate));
+
+ gst_element_class_set_details_simple (element_class, "Push File Source",
+ "Testing",
+ "Implements pushfile:// URI-handler for push-based file access",
+ "Tim-Philipp Müller <tim centricular net>");
}
static gboolean
-gst_push_file_src_ghostpad_checkgetrange (GstPad * pad)
+gst_push_file_src_ghostpad_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
- return FALSE;
+ gboolean res;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_SCHEDULING:
+ gst_query_set_scheduling (query, GST_SCHEDULING_FLAG_SEEKABLE, 1, -1, 0);
+ gst_query_add_scheduling_mode (query, GST_PAD_MODE_PUSH);
+ res = TRUE;
+ break;
+ default:
+ res = gst_proxy_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
}
static void
-gst_push_file_src_init (GstPushFileSrc * src, GstPushFileSrcClass * g_class)
+gst_push_file_src_init (GstPushFileSrc * src)
{
src->filesrc = gst_element_factory_make ("filesrc", "real-filesrc");
if (src->filesrc) {
src->srcpad = gst_ghost_pad_new ("src", pad);
/* FIXME^H^HCORE: try pushfile:///foo/bar.ext ! typefind ! fakesink without
* this and watch core bugginess (some pad stays in flushing state) */
- gst_pad_set_checkgetrange_function (src->srcpad,
- GST_DEBUG_FUNCPTR (gst_push_file_src_ghostpad_checkgetrange));
+ gst_pad_set_query_function (src->srcpad,
+ GST_DEBUG_FUNCPTR (gst_push_file_src_ghostpad_query));
gst_element_add_pad (GST_ELEMENT (src), src->srcpad);
gst_object_unref (pad);
}
/*** GSTURIHANDLER INTERFACE *************************************************/
static GstURIType
-gst_push_file_src_uri_get_type (void)
+gst_push_file_src_uri_get_type (GType type)
{
return GST_URI_SRC;
}
-static gchar **
-gst_push_file_src_uri_get_protocols (void)
+static const gchar *const *
+gst_push_file_src_uri_get_protocols (GType type)
{
- static gchar *protocols[] = { (char *) "pushfile", NULL };
+ static const gchar *protocols[] = { "pushfile", NULL };
return protocols;
}
-static const gchar *
+static gchar *
gst_push_file_src_uri_get_uri (GstURIHandler * handler)
{
GstPushFileSrc *src = GST_PUSH_FILE_SRC (handler);
+ gchar *fileuri, *pushfileuri;
if (src->filesrc == NULL)
return NULL;
- return gst_uri_handler_get_uri (GST_URI_HANDLER (src->filesrc));
+ fileuri = gst_uri_handler_get_uri (GST_URI_HANDLER (src->filesrc));;
+ if (fileuri == NULL)
+ return NULL;
+ pushfileuri = g_strconcat ("push", fileuri, NULL);
+ g_free (fileuri);
+
+ return pushfileuri;
}
static gboolean
-gst_push_file_src_uri_set_uri (GstURIHandler * handler, const gchar * uri)
+gst_push_file_src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
{
GstPushFileSrc *src = GST_PUSH_FILE_SRC (handler);
- if (src->filesrc == NULL || !g_str_has_prefix (uri, "pushfile://"))
+ if (src->filesrc == NULL) {
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_STATE,
+ "Could not create file source element");
return FALSE;
+ }
/* skip 'push' bit */
- return gst_uri_handler_set_uri (GST_URI_HANDLER (src->filesrc), uri + 4);
+ return gst_uri_handler_set_uri (GST_URI_HANDLER (src->filesrc), uri + 4,
+ error);
}
static void
};
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_tag_inject_debug, "taginject", 0, "tag inject element");
-
-GST_BOILERPLATE_FULL (GstTagInject, gst_tag_inject, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);
+#define gst_tag_inject_parent_class parent_class
+G_DEFINE_TYPE (GstTagInject, gst_tag_inject, GST_TYPE_BASE_TRANSFORM);
static void gst_tag_inject_finalize (GObject * object);
static void gst_tag_inject_set_property (GObject * object, guint prop_id,
static void
-gst_tag_inject_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (gstelement_class,
- "TagInject",
- "Generic", "inject metadata tags", "Stefan Kost <ensonic@users.sf.net>");
- gst_element_class_add_static_pad_template (gstelement_class,
- &srctemplate);
- gst_element_class_add_static_pad_template (gstelement_class,
- &sinktemplate);
-}
-
-static void
gst_tag_inject_finalize (GObject * object)
{
GstTagInject *self = GST_TAG_INJECT (object);
gst_tag_inject_class_init (GstTagInjectClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseTransformClass *gstbasetrans_class;
gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (gst_tag_inject_debug, "taginject", 0,
+ "tag inject element");
+
gobject_class->set_property = gst_tag_inject_set_property;
gobject_class->get_property = gst_tag_inject_get_property;
gobject_class->finalize = gst_tag_inject_finalize;
+ gst_element_class_set_details_simple (gstelement_class,
+ "TagInject",
+ "Generic", "inject metadata tags", "Stefan Kost <ensonic@users.sf.net>");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&srctemplate));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sinktemplate));
+
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_tag_inject_transform_ip);
}
static void
-gst_tag_inject_init (GstTagInject * self, GstTagInjectClass * g_class)
+gst_tag_inject_init (GstTagInject * self)
{
GstBaseTransform *trans = GST_BASE_TRANSFORM (self);
/* send tags */
if (self->tags && !gst_tag_list_is_empty (self->tags)) {
GST_DEBUG ("tag event :%" GST_PTR_FORMAT, self->tags);
- gst_element_found_tags (GST_ELEMENT (trans),
- gst_tag_list_copy (self->tags));
+ gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (trans),
+ gst_event_new_tag (gst_tag_list_copy (self->tags)));
}
}
case PROP_TAGS:{
gchar *structure =
g_strdup_printf ("taglist,%s", g_value_get_string (value));
- if (!(self->tags = gst_structure_from_string (structure, NULL))) {
+ if (!(self->tags = gst_tag_list_new_from_string (structure))) {
GST_WARNING ("unparsable taglist = '%s'", structure);
}
static void gst_progress_report_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static gboolean gst_progress_report_event (GstBaseTransform * trans,
+static gboolean gst_progress_report_sink_event (GstBaseTransform * trans,
GstEvent * event);
static GstFlowReturn gst_progress_report_transform_ip (GstBaseTransform * trans,
GstBuffer * buf);
static gboolean gst_progress_report_start (GstBaseTransform * trans);
static gboolean gst_progress_report_stop (GstBaseTransform * trans);
-GST_BOILERPLATE (GstProgressReport, gst_progress_report, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
-
-static void
-gst_progress_report_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &progress_report_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &progress_report_src_template);
-
- gst_element_class_set_details_simple (element_class, "Progress report",
- "Testing",
- "Periodically query and report on processing progress",
- "Jan Schmidt <thaytan@mad.scientist.com>");
-}
+#define gst_progress_report_parent_class parent_class
+G_DEFINE_TYPE (GstProgressReport, gst_progress_report, GST_TYPE_BASE_TRANSFORM);
static void
gst_progress_report_finalize (GObject * obj)
gst_progress_report_class_init (GstProgressReportClass * g_class)
{
GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *element_class;
GObjectClass *gobject_class;
gobject_class = G_OBJECT_CLASS (g_class);
+ element_class = GST_ELEMENT_CLASS (g_class);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (g_class);
gobject_class->finalize = gst_progress_report_finalize;
"Format to use for the querying", DEFAULT_FORMAT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gstbasetrans_class->event = GST_DEBUG_FUNCPTR (gst_progress_report_event);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&progress_report_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&progress_report_src_template));
+
+ gst_element_class_set_details_simple (element_class, "Progress report",
+ "Testing",
+ "Periodically query and report on processing progress",
+ "Jan Schmidt <thaytan@mad.scientist.com>");
+
+ gstbasetrans_class->sink_event =
+ GST_DEBUG_FUNCPTR (gst_progress_report_sink_event);
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_progress_report_transform_ip);
gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_progress_report_start);
}
static void
-gst_progress_report_init (GstProgressReport * report,
- GstProgressReportClass * g_class)
+gst_progress_report_init (GstProgressReport * report)
{
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (report), TRUE);
if (filter->do_query || !buf) {
GST_LOG_OBJECT (filter, "using upstream query");
- if (!gst_pad_query_peer_position (sink_pad, &format, &cur) ||
- !gst_pad_query_peer_duration (sink_pad, &format, &total)) {
+ if (!gst_pad_peer_query_position (sink_pad, format, &cur) ||
+ !gst_pad_peer_query_duration (sink_pad, format, &total)) {
return FALSE;
}
} else {
GstBaseTransform *base = GST_BASE_TRANSFORM (filter);
GST_LOG_OBJECT (filter, "using buffer metadata");
- if (format == GST_FORMAT_TIME && base->have_newsegment &&
- base->segment.format == GST_FORMAT_TIME) {
+ if (format == GST_FORMAT_TIME && base->segment.format == GST_FORMAT_TIME) {
cur = gst_segment_to_stream_time (&base->segment, format,
GST_BUFFER_TIMESTAMP (buf));
total = base->segment.duration;
GstCaps *caps;
format_name = "bogounits";
- caps = GST_PAD_CAPS (GST_BASE_TRANSFORM (filter)->sinkpad);
- if (caps && gst_caps_is_fixed (caps) && !gst_caps_is_any (caps)) {
- GstStructure *s = gst_caps_get_structure (caps, 0);
- const gchar *mime_type = gst_structure_get_name (s);
-
- if (g_str_has_prefix (mime_type, "video/") ||
- g_str_has_prefix (mime_type, "image/")) {
- format_name = "frames";
- } else if (g_str_has_prefix (mime_type, "audio/")) {
- format_name = "samples";
+ caps = gst_pad_get_current_caps (GST_BASE_TRANSFORM (filter)->sinkpad);
+ if (caps) {
+ if (gst_caps_is_fixed (caps) && !gst_caps_is_any (caps)) {
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ const gchar *mime_type = gst_structure_get_name (s);
+
+ if (g_str_has_prefix (mime_type, "video/") ||
+ g_str_has_prefix (mime_type, "image/")) {
+ format_name = "frames";
+ } else if (g_str_has_prefix (mime_type, "audio/")) {
+ format_name = "samples";
+ }
}
+ gst_caps_unref (caps);
}
break;
}
}
static gboolean
-gst_progress_report_event (GstBaseTransform * trans, GstEvent * event)
+gst_progress_report_sink_event (GstBaseTransform * trans, GstEvent * event)
{
GstProgressReport *filter;
filter = GST_PROGRESS_REPORT (trans);
- if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
- GTimeVal cur_time;
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ {
+ GTimeVal cur_time;
- g_get_current_time (&cur_time);
- gst_progress_report_report (filter, cur_time, NULL);
+ g_get_current_time (&cur_time);
+ gst_progress_report_report (filter, cur_time, NULL);
+ break;
+ }
+ default:
+ break;
}
- return GST_BASE_TRANSFORM_CLASS (parent_class)->event (trans, event);
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
}
static GstFlowReturn
static void gst_rnd_buffer_size_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static gboolean gst_rnd_buffer_size_activate (GstPad * pad);
-static gboolean gst_rnd_buffer_size_activate_pull (GstPad * pad,
- gboolean active);
+static gboolean gst_rnd_buffer_size_activate (GstPad * pad, GstObject * parent);
+static gboolean gst_rnd_buffer_size_activate_mode (GstPad * pad,
+ GstObject * parent, GstPadMode mode, gboolean active);
static void gst_rnd_buffer_size_loop (GstRndBufferSize * self);
static GstStateChangeReturn gst_rnd_buffer_size_change_state (GstElement *
element, GstStateChange transition);
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_rnd_buffer_size_debug, "rndbuffersize", 0, \
- "rndbuffersize element");
-
GType gst_rnd_buffer_size_get_type (void);
-GST_BOILERPLATE_FULL (GstRndBufferSize, gst_rnd_buffer_size, GstElement,
- GST_TYPE_ELEMENT, DEBUG_INIT);
-
-
-static void
-gst_rnd_buffer_size_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (gstelement_class,
- &sink_template);
- gst_element_class_add_static_pad_template (gstelement_class,
- &src_template);
-
- gst_element_class_set_details_simple (gstelement_class, "Random buffer size",
- "Testing", "pull random sized buffers",
- "Stefan Kost <stefan.kost@nokia.com>");
-}
-
+#define gst_rnd_buffer_size_parent_class parent_class
+G_DEFINE_TYPE (GstRndBufferSize, gst_rnd_buffer_size, GST_TYPE_ELEMENT);
static void
gst_rnd_buffer_size_class_init (GstRndBufferSizeClass * klass)
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (gst_rnd_buffer_size_debug, "rndbuffersize", 0,
+ "rndbuffersize element");
+
gobject_class->set_property = gst_rnd_buffer_size_set_property;
gobject_class->get_property = gst_rnd_buffer_size_get_property;
gobject_class->finalize = gst_rnd_buffer_size_finalize;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "Random buffer size",
+ "Testing", "pull random sized buffers",
+ "Stefan Kost <stefan.kost@nokia.com>");
+
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_change_state);
}
static void
-gst_rnd_buffer_size_init (GstRndBufferSize * self,
- GstRndBufferSizeClass * g_class)
+gst_rnd_buffer_size_init (GstRndBufferSize * self)
{
self->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
gst_pad_set_activate_function (self->sinkpad,
GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_activate));
- gst_pad_set_activatepull_function (self->sinkpad,
- GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_activate_pull));
+ gst_pad_set_activatemode_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_activate_mode));
gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);
self->srcpad = gst_pad_new_from_static_template (&src_template, "src");
static gboolean
-gst_rnd_buffer_size_activate (GstPad * pad)
+gst_rnd_buffer_size_activate (GstPad * pad, GstObject * parent)
{
- if (gst_pad_check_pull_range (pad)) {
- return gst_pad_activate_pull (pad, TRUE);
- } else {
- GST_INFO_OBJECT (pad, "push mode not supported");
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (pad, query)) {
+ gst_query_unref (query);
+ goto no_pull;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode (query, GST_PAD_MODE_PULL);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto no_pull;
+
+ GST_DEBUG_OBJECT (pad, "activating pull");
+ return gst_pad_activate_mode (pad, GST_PAD_MODE_PULL, TRUE);
+
+ /* ERRORS */
+no_pull:
+ {
+ GST_DEBUG_OBJECT (pad, "pull mode not supported");
return FALSE;
}
}
static gboolean
-gst_rnd_buffer_size_activate_pull (GstPad * pad, gboolean active)
+gst_rnd_buffer_size_activate_mode (GstPad * pad, GstObject * parent,
+ GstPadMode mode, gboolean active)
{
- GstRndBufferSize *self = GST_RND_BUFFER_SIZE (GST_OBJECT_PARENT (pad));
-
- if (active) {
- GST_INFO_OBJECT (self, "starting pull");
- return gst_pad_start_task (pad, (GstTaskFunction) gst_rnd_buffer_size_loop,
- self);
- } else {
- GST_INFO_OBJECT (self, "stopping pull");
- return gst_pad_stop_task (pad);
+ gboolean res;
+ GstRndBufferSize *self = GST_RND_BUFFER_SIZE (parent);
+
+ switch (mode) {
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ GST_INFO_OBJECT (self, "starting pull");
+ res =
+ gst_pad_start_task (pad, (GstTaskFunction) gst_rnd_buffer_size_loop,
+ self);
+ } else {
+ GST_INFO_OBJECT (self, "stopping pull");
+ res = gst_pad_stop_task (pad);
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
}
+ return res;
}
{
GstBuffer *buf = NULL;
GstFlowReturn ret;
- guint num_bytes;
+ guint num_bytes, size;
if (G_UNLIKELY (self->min > self->max))
goto bogus_minmax;
if (ret != GST_FLOW_OK)
goto pull_failed;
- if (GST_BUFFER_SIZE (buf) < num_bytes) {
- GST_WARNING_OBJECT (self, "short buffer: %u bytes", GST_BUFFER_SIZE (buf));
+ size = gst_buffer_get_size (buf);
+
+ if (size < num_bytes) {
+ GST_WARNING_OBJECT (self, "short buffer: %u bytes", size);
}
- self->offset += GST_BUFFER_SIZE (buf);
+ self->offset += size;
ret = gst_pad_push (self->srcpad, buf);
pull_failed:
{
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
GST_DEBUG_OBJECT (self, "eos");
gst_pad_push_event (self->srcpad, gst_event_new_eos ());
} else {
push_failed:
{
GST_DEBUG_OBJECT (self, "push flow: %s", gst_flow_get_name (ret));
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
GST_DEBUG_OBJECT (self, "eos");
gst_pad_push_event (self->srcpad, gst_event_new_eos ());
- } else if (ret < GST_FLOW_UNEXPECTED || ret == GST_FLOW_NOT_LINKED) {
+ } else if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED) {
GST_ELEMENT_ERROR (self, STREAM, FAILED,
("Internal data stream error."),
("streaming stopped, reason: %s", gst_flow_get_name (ret)));
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_test_debug, "testsink", 0, \
- "debugging category for testsink element");
-
GType gst_test_get_type (void);
-GST_BOILERPLATE_FULL (GstTest, gst_test, GstBaseSink, GST_TYPE_BASE_SINK,
- DEBUG_INIT);
-
-
-static void
-gst_test_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
+#define gst_test_parent_class parent_class
+G_DEFINE_TYPE (GstTest, gst_test, GST_TYPE_BASE_SINK);
- gst_element_class_add_static_pad_template (gstelement_class,
- &sinktemplate);
-
- gst_element_class_set_details_simple (gstelement_class, "Test plugin",
- "Testing", "perform a number of tests", "Benjamin Otte <otte@gnome>");
-}
static void
gst_test_class_init (GstTestClass * klass)
{
GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GObjectClass *object_class = G_OBJECT_CLASS (klass);
guint i;
+ GST_DEBUG_CATEGORY_INIT (gst_test_debug, "testsink", 0,
+ "debugging category for testsink element");
+
object_class->set_property = gst_test_set_property;
object_class->get_property = gst_test_get_property;
g_object_class_install_property (object_class, 2 * i + 2, spec);
}
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sinktemplate));
+
+ gst_element_class_set_details_simple (gstelement_class, "Test plugin",
+ "Testing", "perform a number of tests", "Benjamin Otte <otte@gnome>");
+
basesink_class->render = GST_DEBUG_FUNCPTR (gst_test_render_buffer);
basesink_class->event = GST_DEBUG_FUNCPTR (gst_test_sink_event);
basesink_class->start = GST_DEBUG_FUNCPTR (gst_test_start);
}
static void
-gst_test_init (GstTest * test, GstTestClass * g_class)
+gst_test_init (GstTest * test)
{
GstTestClass *klass;
guint i;
{
GstTestClass *klass = GST_TEST_GET_CLASS (basesink);
GstTest *test = GST_TEST (basesink);
- gboolean ret = FALSE;
switch (GST_EVENT_TYPE (event)) {
/*
}
}
g_object_thaw_notify (G_OBJECT (test));
- ret = TRUE;
break;
}
default:
break;
}
- return ret;
+ return GST_BASE_SINK_CLASS (parent_class)->event (basesink, event);
}
static GstFlowReturn
{
LengthTest *t = test;
- t->value += GST_BUFFER_SIZE (buffer);
+ t->value += gst_buffer_get_size (buffer);
}
static gboolean
static void
md5_add (gpointer checksum, GstBuffer * buffer)
{
- g_checksum_update (checksum, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ g_checksum_update (checksum, map.data, map.size);
+ gst_buffer_unmap (buffer, &map);
}
static gboolean
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class, &src_templ);
- gst_element_class_add_static_pad_template (element_class, &sink_templ);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_templ));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_templ));
gst_element_class_set_details_simple (element_class,
"Deinterlacer",
gstradioac.c gststreak.c gstripple.c
libgsteffectv_la_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \
- $(GST_CONTROLLER_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(GST_CFLAGS) \
-I$(top_srcdir)/gst/videofilter
libgsteffectv_la_LIBADD = \
$(GST_PLUGINS_BASE_LIBS) -lgstvideo-@GST_MAJORMINOR@ \
- $(GST_CONTROLLER_LIBS) \
$(GST_BASE_LIBS) \
$(GST_LIBS) \
$(LIBM)
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! agingtv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! agingtv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of agingtv on a test stream.
* </refsect2>
*/
#include "gstaging.h"
#include "gsteffectv.h"
-#include <gst/video/video.h>
-#include <gst/controller/gstcontroller.h>
-
static const gint dx[8] = { 1, 1, 0, -1, -1, -1, 0, 1 };
static const gint dy[8] = { 0, -1, -1, -1, 0, 1, 1, 1 };
#define DEFAULT_DUSTS TRUE
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
-#define CAPS_STR GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_RGBx
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
#else
-#define CAPS_STR GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xRGB, xBGR }")
#endif
static GstStaticPadTemplate gst_agingtv_src_template =
GST_STATIC_CAPS (CAPS_STR)
);
-GST_BOILERPLATE (GstAgingTV, gst_agingtv, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
-
-static gboolean
-gst_agingtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
-{
- GstAgingTV *filter = GST_AGINGTV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
-
- structure = gst_caps_get_structure (incaps, 0);
-
- GST_OBJECT_LOCK (filter);
- if (gst_structure_get_int (structure, "width", &filter->width) &&
- gst_structure_get_int (structure, "height", &filter->height)) {
- ret = TRUE;
- }
- GST_OBJECT_UNLOCK (filter);
-
- return ret;
-}
+G_DEFINE_TYPE (GstAgingTV, gst_agingtv, GST_TYPE_VIDEO_FILTER);
static void
coloraging (guint32 * src, guint32 * dest, gint video_area, gint * c)
}
static GstFlowReturn
-gst_agingtv_transform (GstBaseTransform * trans, GstBuffer * in,
- GstBuffer * out)
+gst_agingtv_transform_frame (GstVideoFilter * filter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
{
- GstAgingTV *agingtv = GST_AGINGTV (trans);
- gint width, height, video_size;
- guint32 *src = (guint32 *) GST_BUFFER_DATA (in);
- guint32 *dest = (guint32 *) GST_BUFFER_DATA (out);
+ GstAgingTV *agingtv = GST_AGINGTV (filter);
gint area_scale;
- GstFlowReturn ret = GST_FLOW_OK;
GstClockTime timestamp, stream_time;
+ gint width, height, stride, video_size;
+ guint32 *src, *dest;
- timestamp = GST_BUFFER_TIMESTAMP (in);
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time =
- gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
+ GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (agingtv, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (agingtv), stream_time);
+ gst_object_sync_values (GST_OBJECT (agingtv), stream_time);
- GST_OBJECT_LOCK (agingtv);
- width = agingtv->width;
- height = agingtv->height;
- video_size = width * height;
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
+ video_size = stride * height;
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
area_scale = width * height / 64 / 480;
if (area_scale <= 0)
if (agingtv->color_aging)
coloraging (src, dest, video_size, &agingtv->coloraging_state);
else
- memcpy (dest, src, GST_BUFFER_SIZE (in));
+ memcpy (dest, src, video_size);
scratching (agingtv->scratches, agingtv->scratch_lines, dest, width, height);
if (agingtv->pits)
pits (dest, width, height, area_scale, &agingtv->pits_interval);
if (area_scale > 1 && agingtv->dusts)
dusts (dest, width, height, &agingtv->dust_interval, area_scale);
- GST_OBJECT_UNLOCK (agingtv);
-
- return ret;
-}
-
-static void
-gst_agingtv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "AgingTV effect",
- "Filter/Effect/Video",
- "AgingTV adds age to video input using scratches and dust",
- "Sam Lantinga <slouken@devolution.com>");
- gst_element_class_add_static_pad_template (element_class,
- &gst_agingtv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_agingtv_src_template);
+ return GST_FLOW_OK;
}
static void
gst_agingtv_class_init (GstAgingTVClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_agingtv_set_property;
gobject_class->get_property = gst_agingtv_get_property;
"Dusts", DEFAULT_DUSTS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_agingtv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_agingtv_transform);
+ gst_element_class_set_details_simple (gstelement_class, "AgingTV effect",
+ "Filter/Effect/Video",
+ "AgingTV adds age to video input using scratches and dust",
+ "Sam Lantinga <slouken@devolution.com>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_agingtv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_agingtv_src_template));
+
trans_class->start = GST_DEBUG_FUNCPTR (gst_agingtv_start);
+
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_agingtv_transform_frame);
}
static void
-gst_agingtv_init (GstAgingTV * agingtv, GstAgingTVClass * klass)
+gst_agingtv_init (GstAgingTV * agingtv)
{
agingtv->scratch_lines = DEFAULT_SCRATCH_LINES;
agingtv->color_aging = DEFAULT_COLOR_AGING;
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter videofilter;
/* < private > */
-
- gint width, height;
-
gboolean color_aging;
gboolean pits;
gboolean dusts;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! dicetv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! dicetv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of dicetv on a test stream.
* </refsect2>
*/
#include "gstdice.h"
#include "gsteffectv.h"
-#include <gst/video/video.h>
-#include <gst/controller/gstcontroller.h>
-
#define DEFAULT_CUBE_BITS 4
#define MAX_CUBE_BITS 5
#define MIN_CUBE_BITS 0
DICE_LEFT = 3
} DiceDir;
-GST_BOILERPLATE (GstDiceTV, gst_dicetv, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
+#define gst_dicetv_parent_class parent_class
+G_DEFINE_TYPE (GstDiceTV, gst_dicetv, GST_TYPE_VIDEO_FILTER);
-static void gst_dicetv_create_map (GstDiceTV * filter);
+static void gst_dicetv_create_map (GstDiceTV * filter, GstVideoInfo * info);
static GstStaticPadTemplate gst_dicetv_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
- GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR }"))
);
static GstStaticPadTemplate gst_dicetv_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
- GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR }"))
);
enum
};
static gboolean
-gst_dicetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_dicetv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstDiceTV *filter = GST_DICETV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
-
- structure = gst_caps_get_structure (incaps, 0);
+ GstDiceTV *filter = GST_DICETV (vfilter);
- GST_OBJECT_LOCK (filter);
- if (gst_structure_get_int (structure, "width", &filter->width) &&
- gst_structure_get_int (structure, "height", &filter->height)) {
- g_free (filter->dicemap);
- filter->dicemap = (guint8 *) g_malloc (filter->height * filter->width);
- gst_dicetv_create_map (filter);
- ret = TRUE;
- }
- GST_OBJECT_UNLOCK (filter);
+ g_free (filter->dicemap);
+ filter->dicemap =
+ (guint8 *) g_malloc (GST_VIDEO_INFO_WIDTH (in_info) *
+ GST_VIDEO_INFO_WIDTH (in_info));
+ gst_dicetv_create_map (filter, in_info);
- return ret;
+ return TRUE;
}
static GstFlowReturn
-gst_dicetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
+gst_dicetv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
{
- GstDiceTV *filter = GST_DICETV (trans);
+ GstDiceTV *filter = GST_DICETV (vfilter);
guint32 *src, *dest;
gint i, map_x, map_y, map_i, base, dx, dy, di;
- gint video_width, g_cube_bits, g_cube_size;
+ gint video_stride, g_cube_bits, g_cube_size;
gint g_map_height, g_map_width;
- GstFlowReturn ret = GST_FLOW_OK;
GstClockTime timestamp, stream_time;
const guint8 *dicemap;
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
-
- timestamp = GST_BUFFER_TIMESTAMP (in);
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time =
- gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+ GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+ src = (guint32 *) GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = (guint32 *) GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+ video_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
GST_OBJECT_LOCK (filter);
- video_width = filter->width;
g_cube_bits = filter->g_cube_bits;
g_cube_size = filter->g_cube_size;
g_map_height = filter->g_map_height;
map_i = 0;
for (map_y = 0; map_y < g_map_height; map_y++) {
for (map_x = 0; map_x < g_map_width; map_x++) {
- base = (map_y << g_cube_bits) * video_width + (map_x << g_cube_bits);
+ base = (map_y << g_cube_bits) * video_stride + (map_x << g_cube_bits);
switch (dicemap[map_i]) {
case DICE_UP:
for (dy = 0; dy < g_cube_size; dy++) {
- i = base + dy * video_width;
+ i = base + dy * video_stride;
for (dx = 0; dx < g_cube_size; dx++) {
dest[i] = src[i];
i++;
break;
case DICE_LEFT:
for (dy = 0; dy < g_cube_size; dy++) {
- i = base + dy * video_width;
+ i = base + dy * video_stride;
for (dx = 0; dx < g_cube_size; dx++) {
- di = base + (dx * video_width) + (g_cube_size - dy - 1);
+ di = base + (dx * video_stride) + (g_cube_size - dy - 1);
dest[di] = src[i];
i++;
}
break;
case DICE_DOWN:
for (dy = 0; dy < g_cube_size; dy++) {
- di = base + dy * video_width;
- i = base + (g_cube_size - dy - 1) * video_width + g_cube_size;
+ di = base + dy * video_stride;
+ i = base + (g_cube_size - dy - 1) * video_stride + g_cube_size;
for (dx = 0; dx < g_cube_size; dx++) {
i--;
dest[di] = src[i];
break;
case DICE_RIGHT:
for (dy = 0; dy < g_cube_size; dy++) {
- i = base + (dy * video_width);
+ i = base + (dy * video_stride);
for (dx = 0; dx < g_cube_size; dx++) {
- di = base + dy + (g_cube_size - dx - 1) * video_width;
+ di = base + dy + (g_cube_size - dx - 1) * video_stride;
dest[di] = src[i];
i++;
}
}
GST_OBJECT_UNLOCK (filter);
- return ret;
+ return GST_FLOW_OK;
}
static void
-gst_dicetv_create_map (GstDiceTV * filter)
+gst_dicetv_create_map (GstDiceTV * filter, GstVideoInfo * info)
{
gint x, y, i;
+ gint width, height;
+
+ width = GST_VIDEO_INFO_WIDTH (info);
+ height = GST_VIDEO_INFO_HEIGHT (info);
- if (filter->height <= 0 || filter->width <= 0)
+ if (width <= 0 || height <= 0)
return;
- filter->g_map_height = filter->height >> filter->g_cube_bits;
- filter->g_map_width = filter->width >> filter->g_cube_bits;
+ filter->g_map_height = height >> filter->g_cube_bits;
+ filter->g_map_width = width >> filter->g_cube_bits;
filter->g_cube_size = 1 << filter->g_cube_bits;
i = 0;
case PROP_CUBE_BITS:
GST_OBJECT_LOCK (filter);
filter->g_cube_bits = g_value_get_int (value);
- gst_dicetv_create_map (filter);
+ gst_dicetv_create_map (filter, &GST_VIDEO_FILTER (filter)->in_info);
GST_OBJECT_UNLOCK (filter);
break;
default:
}
static void
-gst_dicetv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "DiceTV effect",
- "Filter/Effect/Video",
- "'Dices' the screen up into many small squares",
- "Wim Taymans <wim.taymans@chello.be>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_dicetv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_dicetv_src_template);
-}
-
-static void
gst_dicetv_class_init (GstDiceTVClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
- GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_dicetv_set_property;
gobject_class->get_property = gst_dicetv_get_property;
MIN_CUBE_BITS, MAX_CUBE_BITS, DEFAULT_CUBE_BITS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_dicetv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_dicetv_transform);
+ gst_element_class_set_details_simple (gstelement_class, "DiceTV effect",
+ "Filter/Effect/Video",
+ "'Dices' the screen up into many small squares",
+ "Wim Taymans <wim.taymans@gmail.be>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_dicetv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_dicetv_src_template));
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_dicetv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_dicetv_transform_frame);
}
static void
-gst_dicetv_init (GstDiceTV * filter, GstDiceTVClass * klass)
+gst_dicetv_init (GstDiceTV * filter)
{
filter->dicemap = NULL;
filter->g_cube_bits = DEFAULT_CUBE_BITS;
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter videofilter;
/* < private > */
-
- gint width, height;
guint8 *dicemap;
gint g_cube_bits;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! edgetv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! edgetv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of edgetv on a test stream.
* </refsect2>
*/
#include "gstedge.h"
-#include <gst/video/video.h>
-
-GST_BOILERPLATE (GstEdgeTV, gst_edgetv, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
+#define gst_edgetv_parent_class parent_class
+G_DEFINE_TYPE (GstEdgeTV, gst_edgetv, GST_TYPE_VIDEO_FILTER);
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
-#define CAPS_STR GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_RGBx
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
#else
-#define CAPS_STR GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_xRGB
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
#endif
static GstStaticPadTemplate gst_edgetv_src_template =
);
static gboolean
-gst_edgetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_edgetv_set_info (GstVideoFilter * filter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstEdgeTV *edgetv = GST_EDGETV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
-
- structure = gst_caps_get_structure (incaps, 0);
+ GstEdgeTV *edgetv = GST_EDGETV (filter);
+ guint map_size;
+ gint width, height;
- GST_OBJECT_LOCK (edgetv);
- if (gst_structure_get_int (structure, "width", &edgetv->width) &&
- gst_structure_get_int (structure, "height", &edgetv->height)) {
- guint map_size;
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
- edgetv->map_width = edgetv->width / 4;
- edgetv->map_height = edgetv->height / 4;
- edgetv->video_width_margin = edgetv->width % 4;
+ edgetv->map_width = width / 4;
+ edgetv->map_height = height / 4;
+ edgetv->video_width_margin = width % 4;
- map_size = edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2;
+ map_size = edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2;
- g_free (edgetv->map);
- edgetv->map = (guint32 *) g_malloc0 (map_size);
- ret = TRUE;
- }
- GST_OBJECT_UNLOCK (edgetv);
+ g_free (edgetv->map);
+ edgetv->map = (guint32 *) g_malloc0 (map_size);
- return ret;
+ return TRUE;
}
static GstFlowReturn
-gst_edgetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
+gst_edgetv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
{
- GstEdgeTV *filter = GST_EDGETV (trans);
+ GstEdgeTV *filter = GST_EDGETV (vfilter);
gint x, y, r, g, b;
guint32 *src, *dest;
guint32 p, q;
guint32 *map;
GstFlowReturn ret = GST_FLOW_OK;
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
-
- GST_OBJECT_LOCK (filter);
map = filter->map;
- width = filter->width;
map_height = filter->map_height;
map_width = filter->map_width;
video_width_margin = filter->video_width_margin;
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+
src += width * 4 + 4;
dest += width * 4 + 4;
src += width * 3 + 8 + video_width_margin;
dest += width * 3 + 8 + video_width_margin;
}
- GST_OBJECT_UNLOCK (filter);
return ret;
}
}
static void
-gst_edgetv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "EdgeTV effect",
- "Filter/Effect/Video",
- "Apply edge detect on video", "Wim Taymans <wim.taymans@chello.be>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_edgetv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_edgetv_src_template);
-}
-
-static void
gst_edgetv_class_init (GstEdgeTVClass * klass)
{
- GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->finalize = gst_edgetv_finalize;
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_edgetv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_edgetv_transform);
+ gst_element_class_set_details_simple (gstelement_class, "EdgeTV effect",
+ "Filter/Effect/Video",
+ "Apply edge detect on video", "Wim Taymans <wim.taymans@chello.be>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_edgetv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_edgetv_src_template));
+
trans_class->start = GST_DEBUG_FUNCPTR (gst_edgetv_start);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_edgetv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_edgetv_transform_frame);
}
static void
-gst_edgetv_init (GstEdgeTV * edgetv, GstEdgeTVClass * klass)
+gst_edgetv_init (GstEdgeTV * edgetv)
{
}
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter videofilter;
/* < private > */
-
- gint width, height;
gint map_width, map_height;
guint32 *map;
gint video_width_margin;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! optv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! optv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of optv on a test stream.
* </refsect2>
*/
#include "gsteffectv.h"
#include <gst/video/video.h>
-#include <gst/controller/gstcontroller.h>
enum
{
static guint32 palette[256];
-GST_BOILERPLATE (GstOpTV, gst_optv, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
+#define gst_optv_parent_class parent_class
+G_DEFINE_TYPE (GstOpTV, gst_optv, GST_TYPE_VIDEO_FILTER);
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
-#define CAPS_STR GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_RGBx
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
#else
-#define CAPS_STR GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_xRGB
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
#endif
static GstStaticPadTemplate gst_optv_src_template =
}
static GstFlowReturn
-gst_optv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
+gst_optv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
{
- GstOpTV *filter = GST_OPTV (trans);
+ GstOpTV *filter = GST_OPTV (vfilter);
guint32 *src, *dest;
- GstFlowReturn ret = GST_FLOW_OK;
gint8 *p;
guint8 *diff;
gint x, y, width, height;
GstClockTime timestamp, stream_time;
guint8 phase;
- timestamp = GST_BUFFER_TIMESTAMP (in);
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time =
- gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+ GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
-
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
if (G_UNLIKELY (filter->opmap[0] == NULL))
return GST_FLOW_NOT_NEGOTIATED;
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
GST_OBJECT_LOCK (filter);
switch (filter->mode) {
default:
filter->phase -= filter->speed;
diff = filter->diff;
- image_y_over (src, diff, filter->threshold, filter->width * filter->height);
- height = filter->height;
- width = filter->width;
+ image_y_over (src, diff, filter->threshold, width * height);
phase = filter->phase;
for (y = 0; y < height; y++) {
}
GST_OBJECT_UNLOCK (filter);
- return ret;
+ return GST_FLOW_OK;
}
static gboolean
-gst_optv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_optv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstOpTV *filter = GST_OPTV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
-
- structure = gst_caps_get_structure (incaps, 0);
-
- GST_OBJECT_LOCK (filter);
- if (gst_structure_get_int (structure, "width", &filter->width) &&
- gst_structure_get_int (structure, "height", &filter->height)) {
- gint i;
-
- for (i = 0; i < 4; i++) {
- if (filter->opmap[i])
- g_free (filter->opmap[i]);
- filter->opmap[i] = g_new (gint8, filter->width * filter->height);
- }
- setOpmap (filter->opmap, filter->width, filter->height);
+ GstOpTV *filter = GST_OPTV (vfilter);
+ gint i, width, height;
- if (filter->diff)
- g_free (filter->diff);
- filter->diff = g_new (guint8, filter->width * filter->height);
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
- ret = TRUE;
+ for (i = 0; i < 4; i++) {
+ if (filter->opmap[i])
+ g_free (filter->opmap[i]);
+ filter->opmap[i] = g_new (gint8, width * height);
}
- GST_OBJECT_UNLOCK (filter);
+ setOpmap (filter->opmap, width, height);
- return ret;
+ if (filter->diff)
+ g_free (filter->diff);
+ filter->diff = g_new (guint8, width * height);
+
+ return TRUE;
}
static gboolean
}
static void
-gst_optv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "OpTV effect",
- "Filter/Effect/Video",
- "Optical art meets real-time video effect",
- "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_optv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_optv_src_template);
-}
-
-static void
gst_optv_class_init (GstOpTVClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_optv_set_property;
gobject_class->get_property = gst_optv_get_property;
"Luma threshold", 0, G_MAXINT, DEFAULT_THRESHOLD,
GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_optv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_optv_transform);
+ gst_element_class_set_details_simple (gstelement_class, "OpTV effect",
+ "Filter/Effect/Video",
+ "Optical art meets real-time video effect",
+ "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_optv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_optv_src_template));
+
trans_class->start = GST_DEBUG_FUNCPTR (gst_optv_start);
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_optv_set_info);
+ vfilter_class->transform_frame = GST_DEBUG_FUNCPTR (gst_optv_transform_frame);
+
initPalette ();
}
static void
-gst_optv_init (GstOpTV * filter, GstOpTVClass * klass)
+gst_optv_init (GstOpTV * filter)
{
filter->speed = DEFAULT_SPEED;
filter->mode = DEFAULT_MODE;
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter element;
/* < private > */
- gint width, height;
-
gint mode;
gint speed;
guint threshold;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! quarktv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! quarktv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of quarktv on a test stream.
* </refsect2>
*/
#include "gstquark.h"
#include "gsteffectv.h"
-#include <gst/controller/gstcontroller.h>
-#include <gst/video/video.h>
-
/* number of frames of time-buffer. It should be as a configurable paramater */
/* This number also must be 2^n just for the speed. */
#define PLANES 16
PROP_PLANES
};
-GST_BOILERPLATE (GstQuarkTV, gst_quarktv, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+#define gst_quarktv_parent_class parent_class
+G_DEFINE_TYPE (GstQuarkTV, gst_quarktv, GST_TYPE_VIDEO_FILTER);
static void gst_quarktv_planetable_clear (GstQuarkTV * filter);
static GstStaticPadTemplate gst_quarktv_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
- GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ xRGB, xBGR, BGRx, RGBx }"))
);
static GstStaticPadTemplate gst_quarktv_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
- GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ xRGB, xBGR, BGRx, RGBx }"))
);
static gboolean
-gst_quarktv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_quarktv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstQuarkTV *filter = GST_QUARKTV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
+ GstQuarkTV *filter = GST_QUARKTV (vfilter);
+ gint width, height;
- structure = gst_caps_get_structure (incaps, 0);
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
- GST_OBJECT_LOCK (filter);
- if (gst_structure_get_int (structure, "width", &filter->width) &&
- gst_structure_get_int (structure, "height", &filter->height)) {
- gst_quarktv_planetable_clear (filter);
- filter->area = filter->width * filter->height;
- ret = TRUE;
- }
- GST_OBJECT_UNLOCK (filter);
+ gst_quarktv_planetable_clear (filter);
+ filter->area = width * height;
- return ret;
+ return TRUE;
}
static GstFlowReturn
-gst_quarktv_transform (GstBaseTransform * trans, GstBuffer * in,
- GstBuffer * out)
+gst_quarktv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
{
- GstQuarkTV *filter = GST_QUARKTV (trans);
+ GstQuarkTV *filter = GST_QUARKTV (vfilter);
gint area;
guint32 *src, *dest;
- GstFlowReturn ret = GST_FLOW_OK;
GstClockTime timestamp;
GstBuffer **planetable;
gint planes, current_plane;
- timestamp = GST_BUFFER_TIMESTAMP (in);
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
timestamp =
- gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+ GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (timestamp))
- gst_object_sync_values (G_OBJECT (filter), timestamp);
+ gst_object_sync_values (GST_OBJECT (filter), timestamp);
if (G_UNLIKELY (filter->planetable == NULL))
- return GST_FLOW_WRONG_STATE;
+ return GST_FLOW_FLUSHING;
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
GST_OBJECT_LOCK (filter);
area = filter->area;
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
planetable = filter->planetable;
planes = filter->planes;
current_plane = filter->current_plane;
if (planetable[current_plane])
gst_buffer_unref (planetable[current_plane]);
- planetable[current_plane] = gst_buffer_ref (in);
+ planetable[current_plane] = gst_buffer_ref (in_frame->buffer);
/* For each pixel */
while (--area) {
/* pick a random buffer */
rand = planetable[(current_plane + (fastrand () >> 24)) % planes];
- /* Copy the pixel from the random buffer to dest */
- dest[area] =
- (rand ? ((guint32 *) GST_BUFFER_DATA (rand))[area] : src[area]);
+ /* Copy the pixel from the random buffer to dest, FIXME, slow */
+ if (rand)
+ gst_buffer_extract (rand, area * 4, &dest[area], 4);
+ else
+ dest[area] = src[area];
}
filter->current_plane--;
filter->current_plane = planes - 1;
GST_OBJECT_UNLOCK (filter);
- return ret;
+ return GST_FLOW_OK;
}
static void
}
static void
-gst_quarktv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "QuarkTV effect",
- "Filter/Effect/Video",
- "Motion dissolver", "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_quarktv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_quarktv_src_template);
-}
-
-static void
gst_quarktv_class_init (GstQuarkTVClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_quarktv_set_property;
gobject_class->get_property = gst_quarktv_get_property;
"Number of planes", 0, 64, PLANES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_quarktv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_quarktv_transform);
+ gst_element_class_set_details_simple (gstelement_class, "QuarkTV effect",
+ "Filter/Effect/Video",
+ "Motion dissolver", "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_quarktv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_quarktv_src_template));
+
trans_class->start = GST_DEBUG_FUNCPTR (gst_quarktv_start);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_quarktv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_quarktv_transform_frame);
}
static void
-gst_quarktv_init (GstQuarkTV * filter, GstQuarkTVClass * klass)
+gst_quarktv_init (GstQuarkTV * filter)
{
filter->planes = PLANES;
filter->current_plane = filter->planes - 1;
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter element;
/* < private > */
-
- gint width, height;
gint area;
gint planes;
gint current_plane;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! radioactv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! radioactv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of radioactv on a test stream.
* </refsect2>
*/
#include "gstradioac.h"
#include "gsteffectv.h"
-#include <gst/video/video.h>
-#include <gst/controller/gstcontroller.h>
-
enum
{
RADIOAC_NORMAL = 0,
static guint32 palettes[COLORS * PATTERN];
static gint swap_tab[] = { 2, 1, 0, 3 };
-GST_BOILERPLATE (GstRadioacTV, gst_radioactv, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+#define gst_radioactv_parent_class parent_class
+G_DEFINE_TYPE (GstRadioacTV, gst_radioactv, GST_TYPE_VIDEO_FILTER);
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
-#define CAPS_STR GST_VIDEO_CAPS_RGBx "; " GST_VIDEO_CAPS_BGRx
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ RGBx, BGRx }")
#else
-#define CAPS_STR GST_VIDEO_CAPS_xBGR "; " GST_VIDEO_CAPS_xRGB
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
#endif
static GstStaticPadTemplate gst_radioactv_src_template =
gint width;
guint8 *p, *q;
guint8 v;
+ GstVideoInfo *info;
+
+ info = &GST_VIDEO_FILTER (filter)->in_info;
width = filter->buf_width;
- p = filter->blurzoombuf + filter->width + 1;
+ p = filter->blurzoombuf + GST_VIDEO_INFO_WIDTH (info) + 1;
q = p + filter->buf_area;
for (y = filter->buf_height - 2; y > 0; y--) {
}
static GstFlowReturn
-gst_radioactv_transform (GstBaseTransform * trans, GstBuffer * in,
- GstBuffer * out)
+gst_radioactv_transform_frame (GstVideoFilter * vfilter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
- GstRadioacTV *filter = GST_RADIOACTV (trans);
+ GstRadioacTV *filter = GST_RADIOACTV (vfilter);
guint32 *src, *dest;
- GstFlowReturn ret = GST_FLOW_OK;
GstClockTime timestamp, stream_time;
- gint x, y;
+ gint x, y, width, height;
guint32 a, b;
guint8 *diff, *p;
guint32 *palette;
- timestamp = GST_BUFFER_TIMESTAMP (in);
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time =
- gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
+ GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
GST_OBJECT_LOCK (filter);
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
- if (filter->format == GST_VIDEO_FORMAT_RGBx) {
+ if (GST_VIDEO_FRAME_FORMAT (in_frame) == GST_VIDEO_FORMAT_RGBx) {
palette = &palettes[COLORS * filter->color];
} else {
palette = &palettes[COLORS * swap_tab[filter->color]];
}
#else
- if (filter->format == GST_VIDEO_FORMAT_xBGR) {
+ if (GST_VIDEO_FRAME_FORMAT (in_frame) == GST_VIDEO_FORMAT_xBGR) {
palette = &palettes[COLORS * filter->color];
} else {
palette = &palettes[COLORS * swap_tab[filter->color]];
if (filter->mode != 2 || filter->snaptime <= 0) {
image_bgsubtract_update_y (src, filter->background, diff,
- filter->width * filter->height, MAGIC_THRESHOLD * 7);
+ width * height, MAGIC_THRESHOLD * 7);
if (filter->mode == 0 || filter->snaptime <= 0) {
diff += filter->buf_margin_left;
p = filter->blurzoombuf;
for (x = 0; x < filter->buf_width; x++) {
p[x] |= diff[x] >> 3;
}
- diff += filter->width;
+ diff += width;
p += filter->buf_width;
}
if (filter->mode == 1 || filter->mode == 2) {
- memcpy (filter->snapframe, src, filter->width * filter->height * 4);
+ memcpy (filter->snapframe, src, width * height * 4);
}
}
}
src = filter->snapframe;
}
p = filter->blurzoombuf;
- for (y = 0; y < filter->height; y++) {
+ for (y = 0; y < height; y++) {
for (x = 0; x < filter->buf_margin_left; x++) {
*dest++ = *src++;
}
}
GST_OBJECT_UNLOCK (filter);
- return ret;
+ return GST_FLOW_OK;
}
static gboolean
-gst_radioactv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_radioactv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstRadioacTV *filter = GST_RADIOACTV (btrans);
- gboolean ret = FALSE;
+ GstRadioacTV *filter = GST_RADIOACTV (vfilter);
+ gint width, height;
- GST_OBJECT_LOCK (filter);
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
- if (gst_video_format_parse_caps (incaps, &filter->format, &filter->width,
- &filter->height)) {
- filter->buf_width_blocks = filter->width / 32;
- if (filter->buf_width_blocks > 255)
- goto out;
+ filter->buf_width_blocks = width / 32;
+ if (filter->buf_width_blocks > 255)
+ goto too_wide;
- filter->buf_width = filter->buf_width_blocks * 32;
- filter->buf_height = filter->height;
- filter->buf_area = filter->buf_height * filter->buf_width;
- filter->buf_margin_left = (filter->width - filter->buf_width) / 2;
- filter->buf_margin_right =
- filter->height - filter->buf_width - filter->buf_margin_left;
+ filter->buf_width = filter->buf_width_blocks * 32;
+ filter->buf_height = height;
+ filter->buf_area = filter->buf_height * filter->buf_width;
+ filter->buf_margin_left = (width - filter->buf_width) / 2;
+ filter->buf_margin_right =
+ height - filter->buf_width - filter->buf_margin_left;
- if (filter->blurzoombuf)
- g_free (filter->blurzoombuf);
- filter->blurzoombuf = g_new0 (guint8, filter->buf_area * 2);
+ if (filter->blurzoombuf)
+ g_free (filter->blurzoombuf);
+ filter->blurzoombuf = g_new0 (guint8, filter->buf_area * 2);
- if (filter->blurzoomx)
- g_free (filter->blurzoomx);
- filter->blurzoomx = g_new0 (gint, filter->buf_width);
+ if (filter->blurzoomx)
+ g_free (filter->blurzoomx);
+ filter->blurzoomx = g_new0 (gint, filter->buf_width);
- if (filter->blurzoomy)
- g_free (filter->blurzoomy);
- filter->blurzoomy = g_new0 (gint, filter->buf_height);
+ if (filter->blurzoomy)
+ g_free (filter->blurzoomy);
+ filter->blurzoomy = g_new0 (gint, filter->buf_height);
- if (filter->snapframe)
- g_free (filter->snapframe);
- filter->snapframe = g_new (guint32, filter->width * filter->height);
+ if (filter->snapframe)
+ g_free (filter->snapframe);
+ filter->snapframe = g_new (guint32, width * height);
- if (filter->diff)
- g_free (filter->diff);
- filter->diff = g_new (guint8, filter->width * filter->height);
+ if (filter->diff)
+ g_free (filter->diff);
+ filter->diff = g_new (guint8, width * height);
- if (filter->background)
- g_free (filter->background);
- filter->background = g_new0 (gint16, filter->width * filter->height);
+ if (filter->background)
+ g_free (filter->background);
+ filter->background = g_new0 (gint16, width * height);
- setTable (filter);
+ setTable (filter);
- ret = TRUE;
- }
-out:
- GST_OBJECT_UNLOCK (filter);
+ return TRUE;
- return ret;
+ /* ERRORS */
+too_wide:
+ {
+ GST_DEBUG_OBJECT (filter, "frame too wide");
+ return FALSE;
+ }
}
static gboolean
}
static void
-gst_radioactv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "RadioacTV effect",
- "Filter/Effect/Video",
- "motion-enlightment effect",
- "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_radioactv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_radioactv_src_template);
-}
-
-static void
gst_radioactv_class_init (GstRadioacTVClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_radioactv_set_property;
gobject_class->get_property = gst_radioactv_get_property;
"Trigger (in trigger mode)", DEFAULT_TRIGGER,
GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_radioactv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_radioactv_transform);
+ gst_element_class_set_details_simple (gstelement_class, "RadioacTV effect",
+ "Filter/Effect/Video",
+ "motion-enlightment effect",
+ "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_radioactv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_radioactv_src_template));
+
trans_class->start = GST_DEBUG_FUNCPTR (gst_radioactv_start);
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_radioactv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_radioactv_transform_frame);
+
makePalette ();
}
static void
-gst_radioactv_init (GstRadioacTV * filter, GstRadioacTVClass * klass)
+gst_radioactv_init (GstRadioacTV * filter)
{
filter->mode = DEFAULT_MODE;
filter->color = DEFAULT_COLOR;
GstVideoFilter element;
/* < private > */
- gint width, height;
- GstVideoFormat format;
-
gint mode;
gint color;
guint interval;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! revtv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! revtv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of revtv on a test stream.
* </refsect2>
*/
#include "gstrev.h"
-#include <gst/video/video.h>
-#include <gst/controller/gstcontroller.h>
-
#define THE_COLOR 0xffffffff
enum
PROP_GAIN
};
-GST_BOILERPLATE (GstRevTV, gst_revtv, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
+#define gst_revtv_parent_class parent_class
+G_DEFINE_TYPE (GstRevTV, gst_revtv, GST_TYPE_VIDEO_FILTER);
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
-#define CAPS_STR GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_RGBx
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
#else
-#define CAPS_STR GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_xRGB
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
#endif
static GstStaticPadTemplate gst_revtv_src_template =
GST_STATIC_CAPS (CAPS_STR)
);
-static gboolean
-gst_revtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
-{
- GstRevTV *filter = GST_REVTV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
-
- structure = gst_caps_get_structure (incaps, 0);
-
- GST_OBJECT_LOCK (filter);
- if (gst_structure_get_int (structure, "width", &filter->width) &&
- gst_structure_get_int (structure, "height", &filter->height)) {
- ret = TRUE;
- }
- GST_OBJECT_UNLOCK (filter);
-
- return ret;
-}
-
static GstFlowReturn
-gst_revtv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
+gst_revtv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
{
- GstRevTV *filter = GST_REVTV (trans);
+ GstRevTV *filter = GST_REVTV (vfilter);
guint32 *src, *dest;
- gint width, height;
+ gint width, height, sstride, dstride;
guint32 *nsrc;
gint y, x, R, G, B, yval;
- GstFlowReturn ret = GST_FLOW_OK;
gint linespace, vscale;
GstClockTime timestamp, stream_time;
- timestamp = GST_BUFFER_TIMESTAMP (in);
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time =
- gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+ GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ sstride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+ dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
- GST_OBJECT_LOCK (filter);
- width = filter->width;
- height = filter->height;
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
/* Clear everything to black */
- memset (dest, 0, width * height * sizeof (guint32));
+ memset (dest, 0, dstride * height * sizeof (guint32));
+ GST_OBJECT_LOCK (filter);
linespace = filter->linespace;
vscale = filter->vscale;
/* draw the offset lines */
for (y = 0; y < height; y += linespace) {
for (x = 0; x <= width; x++) {
- nsrc = src + (y * width) + x;
+ nsrc = src + (y * sstride) + x;
/* Calc Y Value for curpix */
R = ((*nsrc) & 0xff0000) >> (16 - 1);
yval = y - ((short) (R + G + B) / vscale);
if (yval > 0) {
- dest[x + (yval * width)] = THE_COLOR;
+ dest[x + (yval * dstride)] = THE_COLOR;
}
}
}
GST_OBJECT_UNLOCK (filter);
- return ret;
+ return GST_FLOW_OK;
}
static void
}
static void
-gst_revtv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "RevTV effect",
- "Filter/Effect/Video",
- "A video waveform monitor for each line of video processed",
- "Wim Taymans <wim.taymans@chello.be>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_revtv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_revtv_src_template);
-}
-
-static void
gst_revtv_class_init (GstRevTVClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
- GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_revtv_set_property;
gobject_class->get_property = gst_revtv_get_property;
g_param_spec_int ("gain", "Gain", "Control gain", 1, 200, 50,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_revtv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_revtv_transform);
+ gst_element_class_set_details_simple (gstelement_class, "RevTV effect",
+ "Filter/Effect/Video",
+ "A video waveform monitor for each line of video processed",
+ "Wim Taymans <wim.taymans@gmail.be>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_revtv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_revtv_src_template));
+
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_revtv_transform_frame);
}
static void
-gst_revtv_init (GstRevTV * restv, GstRevTVClass * klass)
+gst_revtv_init (GstRevTV * restv)
{
restv->vgrabtime = 1;
restv->vgrab = 0;
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter videofilter;
/* < private > */
-
- gint width, height;
gint vgrabtime;
gint vgrab;
gint linespace;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! rippletv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! rippletv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of rippletv on a test stream.
* </refsect2>
*/
#include "gstripple.h"
#include "gsteffectv.h"
-#include <gst/video/video.h>
-#include <gst/controller/gstcontroller.h>
-
#define DEFAULT_MODE 0
enum
return type;
}
-
-GST_BOILERPLATE (GstRippleTV, gst_rippletv, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+#define gst_rippletv_parent_class parent_class
+G_DEFINE_TYPE (GstRippleTV, gst_rippletv, GST_TYPE_VIDEO_FILTER);
static GstStaticPadTemplate gst_rippletv_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR "; " GST_VIDEO_CAPS_xRGB)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx, xBGR, xRGB }"))
);
static GstStaticPadTemplate gst_rippletv_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR "; " GST_VIDEO_CAPS_xRGB)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx, xBGR, xRGB }"))
);
static const gint point = 16;
static gint
setBackground (GstRippleTV * filter, guint32 * src)
{
- image_bgset_y (src, filter->background, filter->width * filter->height);
+ GstVideoInfo *info;
+
+ info = &GST_VIDEO_FILTER (filter)->in_info;
+
+ image_bgset_y (src, filter->background,
+ GST_VIDEO_INFO_WIDTH (info) * GST_VIDEO_INFO_HEIGHT (info));
filter->bg_is_set = TRUE;
return 0;
motiondetect (GstRippleTV * filter, guint32 * src)
{
guint8 *diff = filter->diff;
- gint width = filter->width;
+ gint width, height;
gint *p, *q;
gint x, y, h;
+ GstVideoInfo *info;
+
+ info = &GST_VIDEO_FILTER (filter)->in_info;
+
+ width = GST_VIDEO_INFO_WIDTH (info);
+ height = GST_VIDEO_INFO_HEIGHT (info);
if (!filter->bg_is_set)
setBackground (filter, src);
image_bgsubtract_update_y (src, filter->background, filter->diff,
- filter->width * filter->height);
+ width * height);
p = filter->map1 + filter->map_w + 1;
q = filter->map2 + filter->map_w + 1;
- diff += filter->width + 2;
+ diff += width + 2;
for (y = filter->map_h - 2; y > 0; y--) {
for (x = filter->map_w - 2; x > 0; x--) {
}
static GstFlowReturn
-gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in,
- GstBuffer * out)
+gst_rippletv_transform_frame (GstVideoFilter * vfilter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
- GstRippleTV *filter = GST_RIPPLETV (trans);
+ GstRippleTV *filter = GST_RIPPLETV (vfilter);
guint32 *src, *dest;
- GstFlowReturn ret = GST_FLOW_OK;
gint x, y, i;
gint dx, dy, o_dx;
gint h, v;
gint8 *vp;
GstClockTime timestamp, stream_time;
- timestamp = GST_BUFFER_TIMESTAMP (in);
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time =
- gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+ GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
GST_OBJECT_LOCK (filter);
/* impact from the motion or rain drop */
m_w = filter->map_w;
m_h = filter->map_h;
- v_w = filter->width;
- v_h = filter->height;
+ v_w = GST_VIDEO_FRAME_WIDTH (in_frame);
+ v_h = GST_VIDEO_FRAME_HEIGHT (in_frame);
/* simulate surface wave */
}
GST_OBJECT_UNLOCK (filter);
- return ret;
+ return GST_FLOW_OK;
}
static gboolean
-gst_rippletv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_rippletv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstRippleTV *filter = GST_RIPPLETV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
+ GstRippleTV *filter = GST_RIPPLETV (vfilter);
+ gint width, height;
- structure = gst_caps_get_structure (incaps, 0);
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
GST_OBJECT_LOCK (filter);
- if (gst_structure_get_int (structure, "width", &filter->width) &&
- gst_structure_get_int (structure, "height", &filter->height)) {
-
- filter->map_h = filter->height / 2 + 1;
- filter->map_w = filter->width / 2 + 1;
-
- /* we over allocate the buffers, as the render code does not handle clipping
- * very well */
- if (filter->map)
- g_free (filter->map);
- filter->map = g_new0 (gint, (1 + filter->map_h) * filter->map_w * 3);
+ filter->map_h = height / 2 + 1;
+ filter->map_w = width / 2 + 1;
- filter->map1 = filter->map;
- filter->map2 = filter->map + filter->map_w * filter->map_h;
- filter->map3 = filter->map + filter->map_w * filter->map_h * 2;
+ /* we over allocate the buffers, as the render code does not handle clipping
+ * very well */
+ if (filter->map)
+ g_free (filter->map);
+ filter->map = g_new0 (gint, (1 + filter->map_h) * filter->map_w * 3);
- if (filter->vtable)
- g_free (filter->vtable);
- filter->vtable = g_new0 (gint8, (1 + filter->map_h) * filter->map_w * 2);
+ filter->map1 = filter->map;
+ filter->map2 = filter->map + filter->map_w * filter->map_h;
+ filter->map3 = filter->map + filter->map_w * filter->map_h * 2;
- if (filter->background)
- g_free (filter->background);
- filter->background = g_new0 (gint16, filter->width * (filter->height + 1));
+ if (filter->vtable)
+ g_free (filter->vtable);
+ filter->vtable = g_new0 (gint8, (1 + filter->map_h) * filter->map_w * 2);
- if (filter->diff)
- g_free (filter->diff);
- filter->diff = g_new0 (guint8, filter->width * (filter->height + 1));
+ if (filter->background)
+ g_free (filter->background);
+ filter->background = g_new0 (gint16, width * (height + 1));
- ret = TRUE;
- }
+ if (filter->diff)
+ g_free (filter->diff);
+ filter->diff = g_new0 (guint8, width * (height + 1));
GST_OBJECT_UNLOCK (filter);
- return ret;
+ return TRUE;
}
static gboolean
}
static void
-gst_rippletv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "RippleTV effect",
- "Filter/Effect/Video",
- "RippleTV does ripple mark effect on the video input",
- "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rippletv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rippletv_src_template);
-}
-
-static void
gst_rippletv_class_init (GstRippleTVClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_rippletv_set_property;
gobject_class->get_property = gst_rippletv_get_property;
"Mode", GST_TYPE_RIPPLETV_MODE, DEFAULT_MODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_rippletv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_rippletv_transform);
+ gst_element_class_set_details_simple (gstelement_class, "RippleTV effect",
+ "Filter/Effect/Video",
+ "RippleTV does ripple mark effect on the video input",
+ "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rippletv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rippletv_src_template));
+
trans_class->start = GST_DEBUG_FUNCPTR (gst_rippletv_start);
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_rippletv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_rippletv_transform_frame);
+
setTable ();
}
static void
-gst_rippletv_init (GstRippleTV * filter, GstRippleTVClass * klass)
+gst_rippletv_init (GstRippleTV * filter)
{
filter->mode = DEFAULT_MODE;
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter element;
/* < private > */
- gint width, height;
-
gint mode;
gint16 *background;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! shagadelictv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! shagadelictv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of shagadelictv on a test stream.
* </refsect2>
*/
#include "gstshagadelic.h"
#include "gsteffectv.h"
-#include <gst/video/video.h>
-
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif
-GST_BOILERPLATE (GstShagadelicTV, gst_shagadelictv, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+#define gst_shagadelictv_parent_class parent_class
+G_DEFINE_TYPE (GstShagadelicTV, gst_shagadelictv, GST_TYPE_VIDEO_FILTER);
-static void gst_shagadelic_initialize (GstShagadelicTV * filter);
+static void gst_shagadelic_initialize (GstShagadelicTV * filter,
+ GstVideoInfo * in_info);
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
-#define CAPS_STR GST_VIDEO_CAPS_BGRx
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("BGRx")
#else
-#define CAPS_STR GST_VIDEO_CAPS_xRGB
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("xRGB")
#endif
static GstStaticPadTemplate gst_shagadelictv_src_template =
);
static gboolean
-gst_shagadelictv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_shagadelictv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstShagadelicTV *filter = GST_SHAGADELICTV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
-
- structure = gst_caps_get_structure (incaps, 0);
+ GstShagadelicTV *filter = GST_SHAGADELICTV (vfilter);
+ gint width, height, area;
- GST_OBJECT_LOCK (filter);
- if (gst_structure_get_int (structure, "width", &filter->width) &&
- gst_structure_get_int (structure, "height", &filter->height)) {
- gint area = filter->width * filter->height;
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
- g_free (filter->ripple);
- g_free (filter->spiral);
+ area = width * height;
- filter->ripple = (guint8 *) g_malloc (area * 4);
- filter->spiral = (guint8 *) g_malloc (area);
+ g_free (filter->ripple);
+ g_free (filter->spiral);
+ filter->ripple = (guint8 *) g_malloc (area * 4);
+ filter->spiral = (guint8 *) g_malloc (area);
- gst_shagadelic_initialize (filter);
- ret = TRUE;
- }
- GST_OBJECT_UNLOCK (filter);
+ gst_shagadelic_initialize (filter, in_info);
- return ret;
+ return TRUE;
}
static void
-gst_shagadelic_initialize (GstShagadelicTV * filter)
+gst_shagadelic_initialize (GstShagadelicTV * filter, GstVideoInfo * info)
{
int i, x, y;
-
#ifdef PS2
float xx, yy;
#else
double xx, yy;
#endif
+ gint width, height;
+
+ width = GST_VIDEO_INFO_WIDTH (info);
+ height = GST_VIDEO_INFO_HEIGHT (info);
i = 0;
- for (y = 0; y < filter->height * 2; y++) {
- yy = y - filter->height;
+ for (y = 0; y < height * 2; y++) {
+ yy = y - height;
yy *= yy;
- for (x = 0; x < filter->width * 2; x++) {
- xx = x - filter->width;
+ for (x = 0; x < width * 2; x++) {
+ xx = x - width;
#ifdef PS2
filter->ripple[i++] = ((unsigned int) (sqrtf (xx * xx + yy) * 8)) & 255;
#else
}
i = 0;
- for (y = 0; y < filter->height; y++) {
- yy = y - filter->height / 2;
+ for (y = 0; y < height; y++) {
+ yy = y - height / 2;
- for (x = 0; x < filter->width; x++) {
- xx = x - filter->width / 2;
+ for (x = 0; x < width; x++) {
+ xx = x - width / 2;
#ifdef PS2
filter->spiral[i++] = ((unsigned int)
((atan2f (xx,
*/
}
}
- filter->rx = fastrand () % filter->width;
- filter->ry = fastrand () % filter->height;
- filter->bx = fastrand () % filter->width;
- filter->by = fastrand () % filter->height;
+ filter->rx = fastrand () % width;
+ filter->ry = fastrand () % height;
+ filter->bx = fastrand () % width;
+ filter->by = fastrand () % height;
filter->rvx = -2;
filter->rvy = -2;
filter->bvx = 2;
}
static GstFlowReturn
-gst_shagadelictv_transform (GstBaseTransform * trans, GstBuffer * in,
- GstBuffer * out)
+gst_shagadelictv_transform_frame (GstVideoFilter * vfilter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
- GstShagadelicTV *filter = GST_SHAGADELICTV (trans);
+ GstShagadelicTV *filter = GST_SHAGADELICTV (vfilter);
guint32 *src, *dest;
gint x, y;
guint32 v;
guint8 r, g, b;
gint width, height;
- GstFlowReturn ret = GST_FLOW_OK;
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
- GST_OBJECT_LOCK (filter);
- width = filter->width;
- height = filter->height;
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
for (y = 0; y < height; y++) {
for (x = 0; x < width; x++) {
filter->ry += filter->rvy;
filter->bx += filter->bvx;
filter->by += filter->bvy;
- GST_OBJECT_UNLOCK (filter);
- return ret;
+ return GST_FLOW_OK;
}
static void
}
static void
-gst_shagadelictv_base_init (gpointer g_class)
+gst_shagadelictv_class_init (GstShagadelicTVClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+ gobject_class->finalize = gst_shagadelictv_finalize;
- gst_element_class_set_details_simple (element_class, "ShagadelicTV",
+ gst_element_class_set_details_simple (gstelement_class, "ShagadelicTV",
"Filter/Effect/Video",
"Oh behave, ShagedelicTV makes images shagadelic!",
"Wim Taymans <wim.taymans@chello.be>");
- gst_element_class_add_static_pad_template (element_class,
- &gst_shagadelictv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_shagadelictv_src_template);
-}
-
-static void
-gst_shagadelictv_class_init (GstShagadelicTVClass * klass)
-{
- GObjectClass *gobject_class = (GObjectClass *) klass;
- GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
-
- gobject_class->finalize = gst_shagadelictv_finalize;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_shagadelictv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_shagadelictv_src_template));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_shagadelictv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_shagadelictv_transform);
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_shagadelictv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_shagadelictv_transform_frame);
}
static void
-gst_shagadelictv_init (GstShagadelicTV * filter, GstShagadelicTVClass * klass)
+gst_shagadelictv_init (GstShagadelicTV * filter)
{
filter->ripple = NULL;
filter->spiral = NULL;
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter videofilter;
/* < private > */
-
- gint width, height;
- gint stat;
guint8 *ripple;
guint8 *spiral;
guint8 phase;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! streaktv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! streaktv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of streaktv on a test stream.
* </refsect2>
*/
#include "gststreak.h"
#include "gsteffectv.h"
-#include <gst/video/video.h>
-
#define DEFAULT_FEEDBACK FALSE
enum
PROP_FEEDBACK
};
-GST_BOILERPLATE (GstStreakTV, gst_streaktv, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+#define gst_streaktv_parent_class parent_class
+G_DEFINE_TYPE (GstStreakTV, gst_streaktv, GST_TYPE_VIDEO_FILTER);
static GstStaticPadTemplate gst_streaktv_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR "; " GST_VIDEO_CAPS_xRGB)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx, xBGR, xRGB }"))
);
static GstStaticPadTemplate gst_streaktv_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR "; " GST_VIDEO_CAPS_xRGB)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx, xBGR, xRGB }"))
);
-
static GstFlowReturn
-gst_streaktv_transform (GstBaseTransform * trans, GstBuffer * in,
- GstBuffer * out)
+gst_streaktv_transform_frame (GstVideoFilter * vfilter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
- GstStreakTV *filter = GST_STREAKTV (trans);
+ GstStreakTV *filter = GST_STREAKTV (vfilter);
guint32 *src, *dest;
- GstFlowReturn ret = GST_FLOW_OK;
gint i, cf;
- gint video_area = filter->width * filter->height;
+ gint video_area, width, height;
guint32 **planetable = filter->planetable;
gint plane = filter->plane;
guint stride_mask, stride_shift, stride;
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ video_area = width * height;
+
GST_OBJECT_LOCK (filter);
if (filter->feedback) {
stride_mask = 0xfcfcfcfc;
stride_shift = 3;
}
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
-
for (i = 0; i < video_area; i++) {
planetable[plane][i] = (src[i] & stride_mask) >> stride_shift;
}
filter->plane = plane & (PLANES - 1);
GST_OBJECT_UNLOCK (filter);
- return ret;
+ return GST_FLOW_OK;
}
static gboolean
-gst_streaktv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_streaktv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstStreakTV *filter = GST_STREAKTV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
+ GstStreakTV *filter = GST_STREAKTV (vfilter);
+ gint i, width, height;
- structure = gst_caps_get_structure (incaps, 0);
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
- GST_OBJECT_LOCK (filter);
- if (gst_structure_get_int (structure, "width", &filter->width) &&
- gst_structure_get_int (structure, "height", &filter->height)) {
- gint i;
-
- if (filter->planebuffer)
- g_free (filter->planebuffer);
+ if (filter->planebuffer)
+ g_free (filter->planebuffer);
- filter->planebuffer =
- g_new0 (guint32, filter->width * filter->height * 4 * PLANES);
- for (i = 0; i < PLANES; i++)
- filter->planetable[i] =
- &filter->planebuffer[filter->width * filter->height * i];
+ filter->planebuffer = g_new0 (guint32, width * height * 4 * PLANES);
- ret = TRUE;
- }
- GST_OBJECT_UNLOCK (filter);
+ for (i = 0; i < PLANES; i++)
+ filter->planetable[i] = &filter->planebuffer[width * height * i];
- return ret;
+ return TRUE;
}
static gboolean
}
static void
-gst_streaktv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "StreakTV effect",
- "Filter/Effect/Video",
- "StreakTV makes after images of moving objects",
- "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_streaktv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_streaktv_src_template);
-}
-
-static void
gst_streaktv_class_init (GstStreakTVClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_streaktv_set_property;
gobject_class->get_property = gst_streaktv_get_property;
"Feedback", DEFAULT_FEEDBACK,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_streaktv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_streaktv_transform);
+ gst_element_class_set_details_simple (gstelement_class, "StreakTV effect",
+ "Filter/Effect/Video",
+ "StreakTV makes after images of moving objects",
+ "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_streaktv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_streaktv_src_template));
+
trans_class->start = GST_DEBUG_FUNCPTR (gst_streaktv_start);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_streaktv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_streaktv_transform_frame);
}
static void
-gst_streaktv_init (GstStreakTV * filter, GstStreakTVClass * klass)
+gst_streaktv_init (GstStreakTV * filter)
{
filter->feedback = DEFAULT_FEEDBACK;
}
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter element;
/* < private > */
- gint width, height;
-
gboolean feedback;
guint32 *planebuffer;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! vertigotv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! vertigotv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of vertigotv on a test stream.
* </refsect2>
*/
#include "gstvertigo.h"
-#include <gst/video/video.h>
-#include <gst/controller/gstcontroller.h>
-
-GST_BOILERPLATE (GstVertigoTV, gst_vertigotv, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+#define gst_vertigotv_parent_class parent_class
+G_DEFINE_TYPE (GstVertigoTV, gst_vertigotv, GST_TYPE_VIDEO_FILTER);
/* Filter signals and args */
enum
};
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
-#define CAPS_STR GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ RGBx, BGRx }")
#else
-#define CAPS_STR GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xRGB, xBGR }")
#endif
static GstStaticPadTemplate gst_vertigotv_src_template =
);
static gboolean
-gst_vertigotv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_vertigotv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstVertigoTV *filter = GST_VERTIGOTV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
-
- structure = gst_caps_get_structure (incaps, 0);
+ GstVertigoTV *filter = GST_VERTIGOTV (vfilter);
+ gint area, width, height;
- GST_OBJECT_LOCK (filter);
- if (gst_structure_get_int (structure, "width", &filter->width) &&
- gst_structure_get_int (structure, "height", &filter->height)) {
- gint area = filter->width * filter->height;
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
- g_free (filter->buffer);
- filter->buffer = (guint32 *) g_malloc0 (area * 2 * sizeof (guint32));
+ area = width * height;
- filter->current_buffer = filter->buffer;
- filter->alt_buffer = filter->buffer + area;
- filter->phase = 0;
+ g_free (filter->buffer);
+ filter->buffer = (guint32 *) g_malloc0 (area * 2 * sizeof (guint32));
- ret = TRUE;
- }
- GST_OBJECT_UNLOCK (filter);
+ filter->current_buffer = filter->buffer;
+ filter->alt_buffer = filter->buffer + area;
+ filter->phase = 0;
- return ret;
+ return TRUE;
}
static void
double t;
double x, y;
double dizz;
+ gint width, height;
+ GstVideoInfo *info;
dizz = sin (filter->phase) * 10 + sin (filter->phase * 1.9 + 5) * 5;
- x = filter->width / 2;
- y = filter->height / 2;
+ info = &GST_VIDEO_FILTER (filter)->in_info;
+
+ width = GST_VIDEO_INFO_WIDTH (info);
+ height = GST_VIDEO_INFO_HEIGHT (info);
+
+ x = width / 2;
+ y = height / 2;
t = (x * x + y * y) * filter->zoomrate;
- if (filter->width > filter->height) {
+ if (width > height) {
if (dizz >= 0) {
if (dizz > x)
dizz = x;
}
static GstFlowReturn
-gst_vertigotv_transform (GstBaseTransform * trans, GstBuffer * in,
- GstBuffer * out)
+gst_vertigotv_transform_frame (GstVideoFilter * vfilter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
- GstVertigoTV *filter = GST_VERTIGOTV (trans);
+ GstVertigoTV *filter = GST_VERTIGOTV (vfilter);
guint32 *src, *dest, *p;
guint32 v;
- gint x, y, ox, oy, i, width, height, area;
- GstFlowReturn ret = GST_FLOW_OK;
+ gint x, y, ox, oy, i, width, height, area, sstride, dstride;
GstClockTime timestamp, stream_time;
- timestamp = GST_BUFFER_TIMESTAMP (in);
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time =
- gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
+ GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (filter), stream_time);
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ sstride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+ dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
- GST_OBJECT_LOCK (filter);
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- width = filter->width;
- height = filter->height;
area = width * height;
+ sstride /= 4;
+ dstride /= 4;
+
gst_vertigotv_set_parms (filter);
p = filter->alt_buffer;
- for (y = height; y > 0; y--) {
+ for (y = 0; y < height; y++) {
ox = filter->sx;
oy = filter->sy;
- for (x = width; x > 0; x--) {
+ for (x = 0; x < width; x++) {
i = (oy >> 16) * width + (ox >> 16);
if (i < 0)
i = 0;
i = area;
v = filter->current_buffer[i] & 0xfcfcff;
- v = (v * 3) + ((*src++) & 0xfcfcff);
+ v = (v * 3) + (src[x] & 0xfcfcff);
- *p++ = (v >> 2);
+ *p++ = dest[x] = (v >> 2);
ox += filter->dx;
oy += filter->dy;
}
filter->sx -= filter->dy;
filter->sy += filter->dx;
- }
- memcpy (dest, filter->alt_buffer, area * sizeof (guint32));
+ src += sstride;
+ dest += dstride;
+ }
p = filter->current_buffer;
filter->current_buffer = filter->alt_buffer;
filter->alt_buffer = p;
- GST_OBJECT_UNLOCK (filter);
- return ret;
+ return GST_FLOW_OK;
}
static gboolean
}
static void
-gst_vertigotv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "VertigoTV effect",
- "Filter/Effect/Video",
- "A loopback alpha blending effector with rotating and scaling",
- "Wim Taymans <wim.taymans@chello.be>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_vertigotv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_vertigotv_src_template);
-}
-
-static void
gst_vertigotv_class_init (GstVertigoTVClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_vertigotv_set_property;
gobject_class->get_property = gst_vertigotv_get_property;
"Control the rate of zooming", 1.01, 1.1, 1.01,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "VertigoTV effect",
+ "Filter/Effect/Video",
+ "A loopback alpha blending effector with rotating and scaling",
+ "Wim Taymans <wim.taymans@gmail.be>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_vertigotv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_vertigotv_src_template));
+
trans_class->start = GST_DEBUG_FUNCPTR (gst_vertigotv_start);
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_vertigotv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_vertigotv_transform);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_vertigotv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_vertigotv_transform_frame);
}
static void
-gst_vertigotv_init (GstVertigoTV * filter, GstVertigoTVClass * klass)
+gst_vertigotv_init (GstVertigoTV * filter)
{
filter->buffer = NULL;
filter->phase = 0.0;
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter videofilter;
/* < private > */
-
- gint width, height;
guint32 *buffer;
guint32 *current_buffer, *alt_buffer;
gint dx, dy;
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! warptv ! ffmpegcolorspace ! autovideosink
+ * gst-launch -v videotestsrc ! warptv ! videoconvert ! autovideosink
* ]| This pipeline shows the effect of warptv on a test stream.
* </refsect2>
*/
#include <math.h>
#include "gstwarp.h"
-
-#include <gst/video/video.h>
+#include <gst/video/gstvideometa.h>
+#include <gst/video/gstvideopool.h>
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif
-GST_BOILERPLATE (GstWarpTV, gst_warptv, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
+#define gst_warptv_parent_class parent_class
+G_DEFINE_TYPE (GstWarpTV, gst_warptv, GST_TYPE_VIDEO_FILTER);
static void initSinTable ();
-static void initOffsTable (GstWarpTV * filter);
-static void initDistTable (GstWarpTV * filter);
+static void initDistTable (GstWarpTV * filter, gint width, gint height);
static GstStaticPadTemplate gst_warptv_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
- GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR }"))
);
static GstStaticPadTemplate gst_warptv_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
- GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR }"))
);
static gboolean
-gst_warptv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_warptv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstWarpTV *filter = GST_WARPTV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
-
- structure = gst_caps_get_structure (incaps, 0);
-
- GST_OBJECT_LOCK (filter);
- if (gst_structure_get_int (structure, "width", &filter->width) &&
- gst_structure_get_int (structure, "height", &filter->height)) {
- g_free (filter->disttable);
- g_free (filter->offstable);
+ GstWarpTV *filter = GST_WARPTV (vfilter);
+ gint width, height;
- filter->offstable = g_malloc (filter->height * sizeof (guint32));
- filter->disttable =
- g_malloc (filter->width * filter->height * sizeof (guint32));
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
- initOffsTable (filter);
- initDistTable (filter);
- ret = TRUE;
- }
- GST_OBJECT_UNLOCK (filter);
+ g_free (filter->disttable);
+ filter->disttable = g_malloc (width * height * sizeof (guint32));
+ initDistTable (filter, width, height);
- return ret;
+ return TRUE;
}
static gint32 sintable[1024 + 256];
}
static void
-initOffsTable (GstWarpTV * filter)
-{
- gint y;
-
- for (y = 0; y < filter->height; y++) {
- filter->offstable[y] = y * filter->width;
- }
-}
-
-static void
-initDistTable (GstWarpTV * filter)
+initDistTable (GstWarpTV * filter, gint width, gint height)
{
gint32 halfw, halfh, *distptr;
gint x, y;
float m;
- halfw = filter->width >> 1;
- halfh = filter->height >> 1;
+ halfw = width >> 1;
+ halfh = height >> 1;
distptr = filter->disttable;
}
static GstFlowReturn
-gst_warptv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
+gst_warptv_transform_frame (GstVideoFilter * filter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
{
- GstWarpTV *warptv = GST_WARPTV (trans);
+ GstWarpTV *warptv = GST_WARPTV (filter);
gint width, height;
- guint32 *src = (guint32 *) GST_BUFFER_DATA (in);
- guint32 *dest = (guint32 *) GST_BUFFER_DATA (out);
gint xw, yw, cw;
gint32 c, i, x, y, dx, dy, maxx, maxy;
- gint32 skip, *ctptr, *distptr;
+ gint32 *ctptr, *distptr;
gint32 *ctable;
- GstFlowReturn ret = GST_FLOW_OK;
+ guint32 *src, *dest;
+ gint sstride, dstride;
- GST_OBJECT_LOCK (warptv);
- width = warptv->width;
- height = warptv->height;
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ sstride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0) / 4;
+ dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) / 4;
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ GST_OBJECT_LOCK (warptv);
xw = (gint) (sin ((warptv->tval + 100) * M_PI / 128) * 30);
yw = (gint) (sin ((warptv->tval) * M_PI / 256) * -35);
cw = (gint) (sin ((warptv->tval - 70) * M_PI / 64) * 50);
distptr = warptv->disttable;
ctable = warptv->ctable;
- skip = 0; /* video_width*sizeof(RGB32)/4 - video_width;; */
c = 0;
for (x = 0; x < 512; x++) {
dy = 0;
else if (dy > maxy)
dy = maxy;
- *dest++ = src[warptv->offstable[dy] + dx];
+
+ dest[x] = src[dy * sstride + dx];
}
- dest += skip;
+ dest += dstride;
}
warptv->tval = (warptv->tval + 1) & 511;
GST_OBJECT_UNLOCK (warptv);
- return ret;
+ return GST_FLOW_OK;
}
static gboolean
{
GstWarpTV *warptv = GST_WARPTV (object);
- g_free (warptv->offstable);
- warptv->offstable = NULL;
g_free (warptv->disttable);
warptv->disttable = NULL;
}
static void
-gst_warptv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "WarpTV effect",
- "Filter/Effect/Video",
- "WarpTV does realtime goo'ing of the video input",
- "Sam Lantinga <slouken@devolution.com>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_warptv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_warptv_src_template);
-}
-
-static void
gst_warptv_class_init (GstWarpTVClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->finalize = gst_warptv_finalize;
+ gst_element_class_set_details_simple (gstelement_class, "WarpTV effect",
+ "Filter/Effect/Video",
+ "WarpTV does realtime goo'ing of the video input",
+ "Sam Lantinga <slouken@devolution.com>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_warptv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_warptv_src_template));
+
trans_class->start = GST_DEBUG_FUNCPTR (gst_warptv_start);
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_warptv_set_caps);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_warptv_transform);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_warptv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_warptv_transform_frame);
initSinTable ();
}
static void
-gst_warptv_init (GstWarpTV * warptv, GstWarpTVClass * klass)
+gst_warptv_init (GstWarpTV * warptv)
{
/* nothing to do */
}
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter videofilter;
/* < private > */
-
- gint width, height;
- gint *offstable;
gint32 *disttable;
gint32 ctable[1024];
gint tval;
gstiirequalizer10bands.c gstiirequalizer10bands.h
libgstequalizer_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) \
- $(GST_CONTROLLER_CFLAGS) $(GST_CFLAGS)
+ $(GST_CFLAGS)
libgstequalizer_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
- -lgstaudio-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(GST_CONTROLLER_LIBS) \
+ -lgstaudio-$(GST_MAJORMINOR) $(GST_BASE_LIBS) \
$(GST_LIBS) $(LIBM)
libgstequalizer_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstequalizer_la_LIBTOOLFLAGS = --tag=disable-static
#endif
#include <math.h>
+#include <stdio.h>
#include <string.h>
#include "gstiirequalizer.h"
GST_DEBUG_CATEGORY (equalizer_debug);
#define GST_CAT_DEFAULT equalizer_debug
-#define BANDS_LOCK(equ) g_mutex_lock(equ->bands_lock)
-#define BANDS_UNLOCK(equ) g_mutex_unlock(equ->bands_lock)
+#define BANDS_LOCK(equ) g_mutex_lock(&equ->bands_lock)
+#define BANDS_UNLOCK(equ) g_mutex_unlock(&equ->bands_lock)
static void gst_iir_equalizer_child_proxy_interface_init (gpointer g_iface,
gpointer iface_data);
static void gst_iir_equalizer_finalize (GObject * object);
static gboolean gst_iir_equalizer_setup (GstAudioFilter * filter,
- GstRingBufferSpec * fmt);
+ const GstAudioInfo * info);
static GstFlowReturn gst_iir_equalizer_transform_ip (GstBaseTransform * btrans,
GstBuffer * buf);
#define ALLOWED_CAPS \
- "audio/x-raw-int," \
- " depth=(int)16," \
- " width=(int)16," \
- " endianness=(int)BYTE_ORDER," \
- " signed=(bool)TRUE," \
- " rate=(int)[1000,MAX]," \
- " channels=(int)[1,MAX]; " \
- "audio/x-raw-float," \
- " width=(int) { 32, 64 } ," \
- " endianness=(int)BYTE_ORDER," \
- " rate=(int)[1000,MAX]," \
- " channels=(int)[1,MAX]"
+ "audio/x-raw," \
+ " format=(string) {"GST_AUDIO_NE(S16)","GST_AUDIO_NE(F32)"," \
+ GST_AUDIO_NE(F64)" }, " \
+ " rate=(int)[1000,MAX]," \
+ " channels=(int)[1,MAX]," \
+ " layout=(string)interleaved"
-static void
-_do_init (GType object_type)
-{
- const GInterfaceInfo child_proxy_interface_info = {
- (GInterfaceInitFunc) gst_iir_equalizer_child_proxy_interface_init,
- NULL, /* interface_finalize */
- NULL /* interface_data */
- };
-
- g_type_add_interface_static (object_type, GST_TYPE_CHILD_PROXY,
- &child_proxy_interface_info);
-}
+#define gst_iir_equalizer_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstIirEqualizer, gst_iir_equalizer,
+ GST_TYPE_AUDIO_FILTER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_CHILD_PROXY,
+ gst_iir_equalizer_child_proxy_interface_init));
-GST_BOILERPLATE_FULL (GstIirEqualizer, gst_iir_equalizer,
- GstAudioFilter, GST_TYPE_AUDIO_FILTER, _do_init);
/* child object */
/* equalizer implementation */
static void
-gst_iir_equalizer_base_init (gpointer g_class)
-{
- GstAudioFilterClass *audiofilter_class = GST_AUDIO_FILTER_CLASS (g_class);
- GstCaps *caps;
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (audiofilter_class, caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_iir_equalizer_class_init (GstIirEqualizerClass * klass)
{
GstAudioFilterClass *audio_filter_class = (GstAudioFilterClass *) klass;
GstBaseTransformClass *btrans_class = (GstBaseTransformClass *) klass;
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstCaps *caps;
gobject_class->finalize = gst_iir_equalizer_finalize;
audio_filter_class->setup = gst_iir_equalizer_setup;
btrans_class->transform_ip = gst_iir_equalizer_transform_ip;
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (audio_filter_class, caps);
+ gst_caps_unref (caps);
}
static void
-gst_iir_equalizer_init (GstIirEqualizer * eq, GstIirEqualizerClass * g_class)
+gst_iir_equalizer_init (GstIirEqualizer * eq)
{
- eq->bands_lock = g_mutex_new ();
+ g_mutex_init (&eq->bands_lock);
eq->need_new_coefficients = TRUE;
}
g_free (equ->bands);
g_free (equ->history);
- g_mutex_free (equ->bands_lock);
+ g_mutex_clear (&equ->bands_lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
setup_peak_filter (GstIirEqualizer * equ, GstIirEqualizerBand * band)
{
- g_return_if_fail (GST_AUDIO_FILTER (equ)->format.rate);
+ gint rate = GST_AUDIO_FILTER_RATE (equ);
+
+ g_return_if_fail (rate);
{
gdouble gain, omega, bw;
gdouble alpha, alpha1, alpha2, b0;
gain = arg_to_scale (band->gain);
- omega = calculate_omega (band->freq, GST_AUDIO_FILTER (equ)->format.rate);
- bw = calculate_bw (band, GST_AUDIO_FILTER (equ)->format.rate);
+ omega = calculate_omega (band->freq, rate);
+ bw = calculate_bw (band, rate);
if (bw == 0.0)
goto out;
static void
setup_low_shelf_filter (GstIirEqualizer * equ, GstIirEqualizerBand * band)
{
- g_return_if_fail (GST_AUDIO_FILTER (equ)->format.rate);
+ gint rate = GST_AUDIO_FILTER_RATE (equ);
+
+ g_return_if_fail (rate);
{
gdouble gain, omega, bw;
gdouble egp, egm;
gain = arg_to_scale (band->gain);
- omega = calculate_omega (band->freq, GST_AUDIO_FILTER (equ)->format.rate);
- bw = calculate_bw (band, GST_AUDIO_FILTER (equ)->format.rate);
+ omega = calculate_omega (band->freq, rate);
+ bw = calculate_bw (band, rate);
if (bw == 0.0)
goto out;
static void
setup_high_shelf_filter (GstIirEqualizer * equ, GstIirEqualizerBand * band)
{
- g_return_if_fail (GST_AUDIO_FILTER (equ)->format.rate);
+ gint rate = GST_AUDIO_FILTER_RATE (equ);
+
+ g_return_if_fail (rate);
{
gdouble gain, omega, bw;
gdouble egp, egm;
gain = arg_to_scale (band->gain);
- omega = calculate_omega (band->freq, GST_AUDIO_FILTER (equ)->format.rate);
- bw = calculate_bw (band, GST_AUDIO_FILTER (equ)->format.rate);
+ omega = calculate_omega (band->freq, rate);
+ bw = calculate_bw (band, rate);
if (bw == 0.0)
goto out;
/* Must be called with transform lock! */
static void
-alloc_history (GstIirEqualizer * equ)
+alloc_history (GstIirEqualizer * equ, const GstAudioInfo * info)
{
/* free + alloc = no memcpy */
g_free (equ->history);
equ->history =
- g_malloc0 (equ->history_size * GST_AUDIO_FILTER (equ)->format.channels *
+ g_malloc0 (equ->history_size * GST_AUDIO_INFO_CHANNELS (info) *
equ->freq_band_count);
}
}
}
- alloc_history (equ);
+ alloc_history (equ, GST_AUDIO_FILTER_INFO (equ));
/* set center frequencies and name band objects
* FIXME: arg! we can't change the name of parented objects :(
GstAudioFilter *filter = GST_AUDIO_FILTER (btrans);
GstIirEqualizer *equ = GST_IIR_EQUALIZER (btrans);
GstClockTime timestamp;
+ GstMapInfo map;
+ gint channels = GST_AUDIO_FILTER_CHANNELS (filter);
- if (G_UNLIKELY (filter->format.channels < 1 || equ->process == NULL))
+ if (G_UNLIKELY (channels < 1 || equ->process == NULL))
return GST_FLOW_NOT_NEGOTIATED;
if (gst_base_transform_is_passthrough (btrans))
GstIirEqualizerBand **filters = equ->bands;
guint f, nf = equ->freq_band_count;
- gst_object_sync_values (G_OBJECT (equ), timestamp);
+ gst_object_sync_values (GST_OBJECT (equ), timestamp);
/* sync values for bands too */
for (f = 0; f < nf; f++) {
- gst_object_sync_values (G_OBJECT (filters[f]), timestamp);
+ gst_object_sync_values (GST_OBJECT (filters[f]), timestamp);
}
}
}
BANDS_UNLOCK (equ);
- equ->process (equ, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf),
- filter->format.channels);
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ equ->process (equ, map.data, map.size, channels);
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_OK;
}
static gboolean
-gst_iir_equalizer_setup (GstAudioFilter * audio, GstRingBufferSpec * fmt)
+gst_iir_equalizer_setup (GstAudioFilter * audio, const GstAudioInfo * info)
{
GstIirEqualizer *equ = GST_IIR_EQUALIZER (audio);
- switch (fmt->type) {
- case GST_BUFTYPE_LINEAR:
- switch (fmt->width) {
- case 16:
- equ->history_size = history_size_gint16;
- equ->process = gst_iir_equ_process_gint16;
- break;
- default:
- return FALSE;
- }
+ switch (GST_AUDIO_INFO_FORMAT (info)) {
+ case GST_AUDIO_FORMAT_S16:
+ equ->history_size = history_size_gint16;
+ equ->process = gst_iir_equ_process_gint16;
break;
- case GST_BUFTYPE_FLOAT:
- switch (fmt->width) {
- case 32:
- equ->history_size = history_size_gfloat;
- equ->process = gst_iir_equ_process_gfloat;
- break;
- case 64:
- equ->history_size = history_size_gdouble;
- equ->process = gst_iir_equ_process_gdouble;
- break;
- default:
- return FALSE;
- }
+ case GST_AUDIO_FORMAT_F32:
+ equ->history_size = history_size_gfloat;
+ equ->process = gst_iir_equ_process_gfloat;
+ break;
+ case GST_AUDIO_FORMAT_F64:
+ equ->history_size = history_size_gdouble;
+ equ->process = gst_iir_equ_process_gdouble;
break;
default:
return FALSE;
}
- alloc_history (equ);
+ alloc_history (equ, info);
return TRUE;
}
#define __GST_IIR_EQUALIZER__
#include <gst/audio/gstaudiofilter.h>
-#include <gst/audio/gstringbuffer.h>
-#include <gst/controller/gstcontroller.h>
typedef struct _GstIirEqualizer GstIirEqualizer;
typedef struct _GstIirEqualizerClass GstIirEqualizerClass;
/*< private >*/
- GMutex *bands_lock;
+ GMutex bands_lock;
GstIirEqualizerBand **bands;
/* properties */
#define GST_CAT_DEFAULT equalizer_debug
-static void
-_do_init (GType object_type)
-{
- const GInterfaceInfo preset_interface_info = {
- NULL, /* interface_init */
- NULL, /* interface_finalize */
- NULL /* interface_data */
- };
-
- g_type_add_interface_static (object_type, GST_TYPE_PRESET,
- &preset_interface_info);
-}
-
-GST_BOILERPLATE_FULL (GstIirEqualizer10Bands, gst_iir_equalizer_10bands,
- GstIirEqualizer, GST_TYPE_IIR_EQUALIZER, _do_init);
+#define gst_iir_equalizer_10bands_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstIirEqualizer10Bands, gst_iir_equalizer_10bands,
+ GST_TYPE_IIR_EQUALIZER, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL));
/* equalizer implementation */
static void
-gst_iir_equalizer_10bands_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "10 Band Equalizer",
- "Filter/Effect/Audio",
- "Direct Form 10 band IIR equalizer",
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_iir_equalizer_10bands_class_init (GstIirEqualizer10BandsClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
gobject_class->set_property = gst_iir_equalizer_10bands_set_property;
gobject_class->get_property = gst_iir_equalizer_10bands_get_property;
"gain for the frequency band 15011 Hz, ranging from -24 dB to +12 dB",
-24.0, 12.0, 0.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+ gst_element_class_set_details_simple (gstelement_class, "10 Band Equalizer",
+ "Filter/Effect/Audio",
+ "Direct Form 10 band IIR equalizer",
+ "Stefan Kost <ensonic@users.sf.net>");
}
static void
-gst_iir_equalizer_10bands_init (GstIirEqualizer10Bands * equ_n,
- GstIirEqualizer10BandsClass * g_class)
+gst_iir_equalizer_10bands_init (GstIirEqualizer10Bands * equ_n)
{
GstIirEqualizer *equ = GST_IIR_EQUALIZER (equ_n);
GST_DEBUG_CATEGORY_EXTERN (equalizer_debug);
#define GST_CAT_DEFAULT equalizer_debug
-
-static void
-_do_init (GType object_type)
-{
- const GInterfaceInfo preset_interface_info = {
- NULL, /* interface_init */
- NULL, /* interface_finalize */
- NULL /* interface_data */
- };
-
- g_type_add_interface_static (object_type, GST_TYPE_PRESET,
- &preset_interface_info);
-}
-
-GST_BOILERPLATE_FULL (GstIirEqualizer3Bands, gst_iir_equalizer_3bands,
- GstIirEqualizer, GST_TYPE_IIR_EQUALIZER, _do_init);
+#define gst_iir_equalizer_3bands_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstIirEqualizer3Bands, gst_iir_equalizer_3bands,
+ GST_TYPE_IIR_EQUALIZER, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL));
/* equalizer implementation */
static void
-gst_iir_equalizer_3bands_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "3 Band Equalizer",
- "Filter/Effect/Audio",
- "Direct Form 3 band IIR equalizer", "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_iir_equalizer_3bands_class_init (GstIirEqualizer3BandsClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
gobject_class->set_property = gst_iir_equalizer_3bands_set_property;
gobject_class->get_property = gst_iir_equalizer_3bands_get_property;
"gain for the frequency band 11 kHz, ranging from -24.0 to +12.0",
-24.0, 12.0, 0.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+ gst_element_class_set_details_simple (gstelement_class, "3 Band Equalizer",
+ "Filter/Effect/Audio",
+ "Direct Form 3 band IIR equalizer", "Stefan Kost <ensonic@users.sf.net>");
}
static void
-gst_iir_equalizer_3bands_init (GstIirEqualizer3Bands * equ_n,
- GstIirEqualizer3BandsClass * g_class)
+gst_iir_equalizer_3bands_init (GstIirEqualizer3Bands * equ_n)
{
GstIirEqualizer *equ = GST_IIR_EQUALIZER (equ_n);
GST_DEBUG_CATEGORY_EXTERN (equalizer_debug);
#define GST_CAT_DEFAULT equalizer_debug
-GST_BOILERPLATE (GstIirEqualizerNBands, gst_iir_equalizer_nbands,
- GstIirEqualizer, GST_TYPE_IIR_EQUALIZER);
+#define gst_iir_equalizer_nbands_parent_class parent_class
+G_DEFINE_TYPE (GstIirEqualizerNBands, gst_iir_equalizer_nbands,
+ GST_TYPE_IIR_EQUALIZER);
/* equalizer implementation */
static void
-gst_iir_equalizer_nbands_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "N Band Equalizer",
- "Filter/Effect/Audio",
- "Direct Form IIR equalizer",
- "Benjamin Otte <otte@gnome.org>," " Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_iir_equalizer_nbands_class_init (GstIirEqualizerNBandsClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
gobject_class->set_property = gst_iir_equalizer_nbands_set_property;
gobject_class->get_property = gst_iir_equalizer_nbands_get_property;
g_param_spec_uint ("num-bands", "num-bands",
"number of different bands to use", 1, 64, 10,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT));
+
+ gst_element_class_set_details_simple (gstelement_class, "N Band Equalizer",
+ "Filter/Effect/Audio",
+ "Direct Form IIR equalizer",
+ "Benjamin Otte <otte@gnome.org>," " Stefan Kost <ensonic@users.sf.net>");
}
static void
-gst_iir_equalizer_nbands_init (GstIirEqualizerNBands * equ_n,
- GstIirEqualizerNBandsClass * g_class)
+gst_iir_equalizer_nbands_init (GstIirEqualizerNBands * equ_n)
{
GstIirEqualizer *equ = GST_IIR_EQUALIZER (equ_n);
plugin_LTLIBRARIES = libgstflv.la
libgstflv_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
-libgstflv_la_LIBADD = -lgstpbutils-@GST_MAJORMINOR@ \
+libgstflv_la_LIBADD = -lgstpbutils-@GST_MAJORMINOR@ -lgstaudio-@GST_MAJORMINOR@\
$(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS)
libgstflv_la_LDFLAGS = ${GST_PLUGIN_LDFLAGS}
libgstflv_la_SOURCES = gstflvdemux.c gstflvmux.c
libgstflv_la_LIBTOOLFLAGS = --tag=disable-static
-noinst_HEADERS = gstflvdemux.h gstflvmux.h amfdefs.h
+noinst_HEADERS = gstflvdemux.h gstflvmux.h amfdefs.h gstindex.h
+
+EXTRA_DIST = gstindex.c gstmemindex.c
Android.mk: Makefile.am $(BUILT_SOURCES)
androgenizer \
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include "gstflvdemux.h"
#include "gstflvmux.h"
#include <gst/base/gstbytereader.h>
#include <gst/pbutils/descriptions.h>
#include <gst/pbutils/pbutils.h>
+#include <gst/audio/audio.h>
+
+/* FIXME: don't rely on own GstIndex */
+#include "gstindex.c"
+#include "gstmemindex.c"
+#define GST_ASSOCIATION_FLAG_NONE GST_INDEX_ASSOCIATION_FLAG_NONE
+#define GST_ASSOCIATION_FLAG_KEY_UNIT GST_INDEX_ASSOCIATION_FLAG_KEY_UNIT
+#define GST_ASSOCIATION_FLAG_DELTA_UNIT GST_INDEX_ASSOCIATION_FLAG_DELTA_UNIT
static GstStaticPadTemplate flv_sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
"audio/mpeg, mpegversion = (int) 1, layer = (int) 3, channels = (int) { 1, 2 }, rate = (int) { 5512, 8000, 11025, 22050, 44100 }, parsed = (boolean) TRUE; "
"audio/mpeg, mpegversion = (int) 4, stream-format = (string) raw, framed = (boolean) TRUE; "
"audio/x-nellymoser, channels = (int) { 1, 2 }, rate = (int) { 5512, 8000, 11025, 16000, 22050, 44100 }; "
- "audio/x-raw-int, endianness = (int) LITTLE_ENDIAN, channels = (int) { 1, 2 }, width = (int) 8, depth = (int) 8, rate = (int) { 5512, 11025, 22050, 44100 }, signed = (boolean) FALSE; "
- "audio/x-raw-int, endianness = (int) LITTLE_ENDIAN, channels = (int) { 1, 2 }, width = (int) 16, depth = (int) 16, rate = (int) { 5512, 11025, 22050, 44100 }, signed = (boolean) TRUE; "
+ "audio/x-raw, format = (string) { U8, S16LE }, layout = (string) interleaved, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
"audio/x-alaw, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
"audio/x-mulaw, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
"audio/x-speex, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 };")
GST_DEBUG_CATEGORY_STATIC (flvdemux_debug);
#define GST_CAT_DEFAULT flvdemux_debug
-GST_BOILERPLATE (GstFlvDemux, gst_flv_demux, GstElement, GST_TYPE_ELEMENT);
+#define gst_flv_demux_parent_class parent_class
+G_DEFINE_TYPE (GstFlvDemux, gst_flv_demux, GST_TYPE_ELEMENT);
/* 9 bytes of header + 4 bytes of first previous tag size */
#define FLV_HEADER_SIZE 13
static gboolean gst_flv_demux_handle_seek_pull (GstFlvDemux * demux,
GstEvent * event, gboolean seeking);
-static gboolean gst_flv_demux_query (GstPad * pad, GstQuery * query);
-static gboolean gst_flv_demux_src_event (GstPad * pad, GstEvent * event);
+static gboolean gst_flv_demux_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_flv_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static void
if (entry) {
#ifndef GST_DISABLE_GST_DEBUG
- gint64 time;
+ gint64 time = 0;
gboolean key;
gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
return string;
}
-static const GstQueryType *
-gst_flv_demux_query_types (GstPad * pad)
-{
- static const GstQueryType query_types[] = {
- GST_QUERY_DURATION,
- GST_QUERY_POSITION,
- GST_QUERY_SEEKING,
- 0
- };
-
- return query_types;
-}
-
static void
gst_flv_demux_check_seekability (GstFlvDemux * demux)
{
/* try harder to query upstream size if we didn't get it the first time */
if (demux->upstream_seekable && stop == -1) {
- GstFormat fmt = GST_FORMAT_BYTES;
-
GST_DEBUG_OBJECT (demux, "doing duration query to fix up unset stop");
- gst_pad_query_peer_duration (demux->sinkpad, &fmt, &stop);
+ gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &stop);
}
/* if upstream doesn't know the size, it's likely that it's not seekable in
gst_flv_demux_parse_tag_script (GstFlvDemux * demux, GstBuffer * buffer)
{
GstFlowReturn ret = GST_FLOW_OK;
- GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buffer);
+ GstByteReader reader;
guint8 type = 0;
+ GstMapInfo map;
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) >= 7, GST_FLOW_ERROR);
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 7, GST_FLOW_ERROR);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ gst_byte_reader_init (&reader, map.data, map.size);
gst_byte_reader_skip (&reader, 7);
GST_LOG_OBJECT (demux, "parsing a script tag");
if (!gst_byte_reader_get_uint8 (&reader, &type))
- return GST_FLOW_OK;
+ goto cleanup;
/* Must be string */
if (type == 2) {
if (!gst_byte_reader_get_uint8 (&reader, &type)) {
g_free (function_name);
- return GST_FLOW_OK;
+ goto cleanup;
}
switch (type) {
/* ECMA array */
if (!gst_byte_reader_get_uint32_be (&reader, &nb_elems)) {
g_free (function_name);
- return GST_FLOW_OK;
+ goto cleanup;
}
/* The number of elements is just a hint, some files have
default:
GST_DEBUG_OBJECT (demux, "Unhandled script data type : %d", type);
g_free (function_name);
- return GST_FLOW_OK;
+ goto cleanup;
}
demux->push_tags = TRUE;
}
}
+cleanup:
+ gst_buffer_unmap (buffer, &map);
+
return ret;
}
break;
case 0:
case 3:
+ {
+ GstAudioFormat format;
+
/* Assuming little endian for 0 (aka endianness of the
* system on which the file was created) as most people
* are probably using little endian machines */
- caps = gst_caps_new_simple ("audio/x-raw-int",
- "endianness", G_TYPE_INT, G_LITTLE_ENDIAN,
- "signed", G_TYPE_BOOLEAN, (width == 8) ? FALSE : TRUE,
- "width", G_TYPE_INT, width, "depth", G_TYPE_INT, width, NULL);
+ format = gst_audio_format_build_integer ((width == 8) ? FALSE : TRUE,
+ G_LITTLE_ENDIAN, width, width);
+
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+ "layout", G_TYPE_STRING, "interleaved", NULL);
break;
+ }
case 4:
case 5:
case 6:
- caps = gst_caps_new_simple ("audio/x-nellymoser", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-nellymoser");
break;
case 10:
{
- /* use codec-data to extract and verify samplerate */
- if (demux->audio_codec_data &&
- GST_BUFFER_SIZE (demux->audio_codec_data) >= 2) {
- gint freq_index;
-
- freq_index =
- ((GST_READ_UINT16_BE (GST_BUFFER_DATA (demux->audio_codec_data))));
- freq_index = (freq_index & 0x0780) >> 7;
- adjusted_rate =
- gst_codec_utils_aac_get_sample_rate_from_index (freq_index);
-
- if (adjusted_rate && (rate != adjusted_rate)) {
- GST_LOG_OBJECT (demux, "Ajusting AAC sample rate %d -> %d", rate,
- adjusted_rate);
- } else {
- adjusted_rate = rate;
+ if (demux->audio_codec_data) {
+ GstMapInfo map;
+
+ gst_buffer_map (demux->audio_codec_data, &map, GST_MAP_READ);
+
+ /* use codec-data to extract and verify samplerate */
+ if (map.size >= 2) {
+ gint freq_index;
+
+ freq_index = GST_READ_UINT16_BE (map.data);
+ freq_index = (freq_index & 0x0780) >> 7;
+ adjusted_rate =
+ gst_codec_utils_aac_get_sample_rate_from_index (freq_index);
+
+ if (adjusted_rate && (rate != adjusted_rate)) {
+ GST_LOG_OBJECT (demux, "Ajusting AAC sample rate %d -> %d", rate,
+ adjusted_rate);
+ } else {
+ adjusted_rate = rate;
+ }
}
+ gst_buffer_unmap (demux->audio_codec_data, &map);
}
+
caps = gst_caps_new_simple ("audio/mpeg",
"mpegversion", G_TYPE_INT, 4, "framed", G_TYPE_BOOLEAN, TRUE,
"stream-format", G_TYPE_STRING, "raw", NULL);
break;
}
case 7:
- caps = gst_caps_new_simple ("audio/x-alaw", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-alaw");
break;
case 8:
- caps = gst_caps_new_simple ("audio/x-mulaw", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-mulaw");
break;
case 11:
- caps = gst_caps_new_simple ("audio/x-speex", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-speex");
break;
default:
GST_WARNING_OBJECT (demux, "unsupported audio codec tag %u", codec_tag);
if (codec_name) {
if (demux->taglist == NULL)
- demux->taglist = gst_tag_list_new ();
+ demux->taglist = gst_tag_list_new_empty ();
gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_AUDIO_CODEC, codec_name, NULL);
g_free (codec_name);
return ret;
}
+static gboolean
+gst_flv_demux_push_src_event (GstFlvDemux * demux, GstEvent * event)
+{
+ gboolean ret = TRUE;
+
+ if (demux->audio_pad)
+ ret |= gst_pad_push_event (demux->audio_pad, gst_event_ref (event));
+
+ if (demux->video_pad)
+ ret |= gst_pad_push_event (demux->video_pad, gst_event_ref (event));
+
+ gst_event_unref (event);
+
+ return ret;
+}
+
static void
gst_flv_demux_push_tags (GstFlvDemux * demux)
{
if (demux->taglist) {
GST_DEBUG_OBJECT (demux, "pushing tags out %" GST_PTR_FORMAT,
demux->taglist);
- gst_element_found_tags (GST_ELEMENT (demux), demux->taglist);
- demux->taglist = gst_tag_list_new ();
+ gst_flv_demux_push_src_event (demux, gst_event_new_tag (demux->taglist));
+ demux->taglist = gst_tag_list_new_empty ();
demux->push_tags = FALSE;
}
}
guint32 pts = 0, codec_tag = 0, rate = 5512, width = 8, channels = 1;
guint32 codec_data = 0, pts_ext = 0;
guint8 flags = 0;
- guint8 *data = GST_BUFFER_DATA (buffer);
+ GstMapInfo map;
GstBuffer *outbuf;
+ guint8 *data;
GST_LOG_OBJECT (demux, "parsing an audio tag");
if (demux->no_more_pads && !demux->audio_pad) {
GST_WARNING_OBJECT (demux,
"Signaled no-more-pads already but had no audio pad -- ignoring");
- goto beach;
+ return GST_FLOW_OK;
}
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) == demux->tag_size,
+ g_return_val_if_fail (gst_buffer_get_size (buffer) == demux->tag_size,
GST_FLOW_ERROR);
+ /* Error out on tags with too small headers */
+ if (gst_buffer_get_size (buffer) < 11) {
+ GST_ERROR_OBJECT (demux, "Too small tag size (%" G_GSIZE_FORMAT ")",
+ gst_buffer_get_size (buffer));
+ return GST_FLOW_ERROR;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+
/* Grab information about audio tag */
pts = GST_READ_UINT24_BE (data);
/* read the pts extension to 32 bits integer */
GST_LOG_OBJECT (demux, "pts bytes %02X %02X %02X %02X (%d)", data[0], data[1],
data[2], data[3], pts);
- /* Error out on tags with too small headers */
- if (GST_BUFFER_SIZE (buffer) < 11) {
- GST_ERROR_OBJECT (demux, "Too small tag size (%d)",
- GST_BUFFER_SIZE (buffer));
- return GST_FLOW_ERROR;
- }
-
- /* Silently skip buffers with no data */
- if (GST_BUFFER_SIZE (buffer) == 11)
- return GST_FLOW_OK;
-
/* Skip the stream id and go directly to the flags */
flags = GST_READ_UINT8 (data + 7);
+ /* Silently skip buffers with no data */
+ if (map.size == 11)
+ goto beach;
+
/* Channels */
if (flags & 0x01) {
channels = 2;
goto beach;
}
- /* Negotiate caps */
- if (!gst_flv_demux_audio_negotiate (demux, codec_tag, rate, channels,
- width)) {
- gst_object_unref (demux->audio_pad);
- demux->audio_pad = NULL;
- ret = GST_FLOW_ERROR;
- goto beach;
- }
-
- GST_DEBUG_OBJECT (demux, "created audio pad with caps %" GST_PTR_FORMAT,
- GST_PAD_CAPS (demux->audio_pad));
-
/* Set functions on the pad */
- gst_pad_set_query_type_function (demux->audio_pad,
- GST_DEBUG_FUNCPTR (gst_flv_demux_query_types));
gst_pad_set_query_function (demux->audio_pad,
GST_DEBUG_FUNCPTR (gst_flv_demux_query));
gst_pad_set_event_function (demux->audio_pad,
/* Make it active */
gst_pad_set_active (demux->audio_pad, TRUE);
+ /* Negotiate caps */
+ if (!gst_flv_demux_audio_negotiate (demux, codec_tag, rate, channels,
+ width)) {
+ gst_object_unref (demux->audio_pad);
+ demux->audio_pad = NULL;
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ GstCaps *caps;
+
+ caps = gst_pad_get_current_caps (demux->audio_pad);
+ GST_DEBUG_OBJECT (demux, "created audio pad with caps %" GST_PTR_FORMAT,
+ caps);
+ if (caps)
+ gst_caps_unref (caps);
+ }
+#endif
+
/* We need to set caps before adding */
gst_element_add_pad (GST_ELEMENT (demux),
gst_object_ref (demux->audio_pad));
}
/* Create buffer from pad */
- outbuf =
- gst_buffer_create_sub (buffer, 7 + codec_data,
- demux->tag_data_size - codec_data);
+ outbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
+ 7 + codec_data, demux->tag_data_size - codec_data);
if (demux->audio_codec_tag == 10) {
guint8 aac_packet_type = GST_READ_UINT8 (data + 8);
GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET (outbuf) = demux->audio_offset++;
GST_BUFFER_OFFSET_END (outbuf) = demux->audio_offset;
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (demux->audio_pad));
if (demux->duration == GST_CLOCK_TIME_NONE ||
demux->duration < GST_BUFFER_TIMESTAMP (outbuf))
demux->audio_need_discont = FALSE;
}
- gst_segment_set_last_stop (&demux->segment, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (outbuf));
+ demux->segment.position = GST_BUFFER_TIMESTAMP (outbuf);
/* Do we need a newsegment event ? */
if (G_UNLIKELY (demux->audio_need_segment)) {
- if (demux->close_seg_event)
- gst_pad_push_event (demux->audio_pad,
- gst_event_ref (demux->close_seg_event));
-
if (!demux->new_seg_event) {
GST_DEBUG_OBJECT (demux, "pushing newsegment from %"
GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
- GST_TIME_ARGS (demux->segment.last_stop),
+ GST_TIME_ARGS (demux->segment.position),
GST_TIME_ARGS (demux->segment.stop));
- demux->new_seg_event =
- gst_event_new_new_segment (FALSE, demux->segment.rate,
- demux->segment.format, demux->segment.last_stop,
- demux->segment.stop, demux->segment.last_stop);
+ demux->segment.start = demux->segment.time = demux->segment.position;
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
} else {
GST_DEBUG_OBJECT (demux, "pushing pre-generated newsegment event");
}
demux->audio_need_segment = FALSE;
}
- GST_LOG_OBJECT (demux, "pushing %d bytes buffer at pts %" GST_TIME_FORMAT
+ GST_LOG_OBJECT (demux,
+ "pushing %" G_GSIZE_FORMAT " bytes buffer at pts %" GST_TIME_FORMAT
" with duration %" GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf));
if (!GST_CLOCK_TIME_IS_VALID (demux->audio_start)) {
/* Push downstream */
ret = gst_pad_push (demux->audio_pad, outbuf);
if (G_UNLIKELY (ret != GST_FLOW_OK)) {
- if (demux->segment.rate < 0.0 && ret == GST_FLOW_UNEXPECTED &&
- demux->segment.last_stop > demux->segment.stop) {
- /* In reverse playback we can get a GST_FLOW_UNEXPECTED when
+ if (demux->segment.rate < 0.0 && ret == GST_FLOW_EOS &&
+ demux->segment.position > demux->segment.stop) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
* we are at the end of the segment, so we just need to jump
* back to the previous section. */
GST_DEBUG_OBJECT (demux, "downstream has reached end of segment");
demux->audio_linked = TRUE;
beach:
+ gst_buffer_unmap (buffer, &map);
+
return ret;
}
/* Generate caps for that pad */
switch (codec_tag) {
case 2:
- caps = gst_caps_new_simple ("video/x-flash-video", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-flash-video");
break;
case 3:
- caps = gst_caps_new_simple ("video/x-flash-screen", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-flash-screen");
break;
case 4:
- caps = gst_caps_new_simple ("video/x-vp6-flash", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-vp6-flash");
break;
case 5:
- caps = gst_caps_new_simple ("video/x-vp6-alpha", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-vp6-alpha");
break;
case 7:
caps =
if (codec_name) {
if (demux->taglist == NULL)
- demux->taglist = gst_tag_list_new ();
+ demux->taglist = gst_tag_list_new_empty ();
gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_VIDEO_CODEC, codec_name, NULL);
g_free (codec_name);
guint32 pts = 0, codec_data = 1, pts_ext = 0;
gboolean keyframe = FALSE;
guint8 flags = 0, codec_tag = 0;
- guint8 *data = GST_BUFFER_DATA (buffer);
GstBuffer *outbuf;
+ GstMapInfo map;
+ guint8 *data;
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) == demux->tag_size,
+ g_return_val_if_fail (gst_buffer_get_size (buffer) == demux->tag_size,
GST_FLOW_ERROR);
GST_LOG_OBJECT (demux, "parsing a video tag");
-
if (demux->no_more_pads && !demux->video_pad) {
GST_WARNING_OBJECT (demux,
"Signaled no-more-pads already but had no audio pad -- ignoring");
- goto beach;
+ return GST_FLOW_OK;
}
+ if (gst_buffer_get_size (buffer) < 12) {
+ GST_ERROR_OBJECT (demux, "Too small tag size");
+ return GST_FLOW_ERROR;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+
/* Grab information about video tag */
pts = GST_READ_UINT24_BE (data);
/* read the pts extension to 32 bits integer */
GST_LOG_OBJECT (demux, "pts bytes %02X %02X %02X %02X (%d)", data[0], data[1],
data[2], data[3], pts);
- if (GST_BUFFER_SIZE (buffer) < 12) {
- GST_ERROR_OBJECT (demux, "Too small tag size");
- return GST_FLOW_ERROR;
- }
-
/* Skip the stream id and go directly to the flags */
flags = GST_READ_UINT8 (data + 7);
goto beach;
}
+ /* Set functions on the pad */
+ gst_pad_set_query_function (demux->video_pad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_query));
+ gst_pad_set_event_function (demux->video_pad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_src_event));
+
+ gst_pad_use_fixed_caps (demux->video_pad);
+
+ /* Make it active */
+ gst_pad_set_active (demux->video_pad, TRUE);
+
+ /* Needs to be active before setting caps */
if (!gst_flv_demux_video_negotiate (demux, codec_tag)) {
gst_object_unref (demux->video_pad);
demux->video_pad = NULL;
* metadata tag that would come later and trigger a caps change */
demux->got_par = FALSE;
- GST_DEBUG_OBJECT (demux, "created video pad with caps %" GST_PTR_FORMAT,
- GST_PAD_CAPS (demux->video_pad));
-
- /* Set functions on the pad */
- gst_pad_set_query_type_function (demux->video_pad,
- GST_DEBUG_FUNCPTR (gst_flv_demux_query_types));
- gst_pad_set_query_function (demux->video_pad,
- GST_DEBUG_FUNCPTR (gst_flv_demux_query));
- gst_pad_set_event_function (demux->video_pad,
- GST_DEBUG_FUNCPTR (gst_flv_demux_src_event));
-
- gst_pad_use_fixed_caps (demux->video_pad);
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ GstCaps *caps;
- /* Make it active */
- gst_pad_set_active (demux->video_pad, TRUE);
+ caps = gst_pad_get_current_caps (demux->video_pad);
+ GST_DEBUG_OBJECT (demux, "created video pad with caps %" GST_PTR_FORMAT,
+ caps);
+ if (caps)
+ gst_caps_unref (caps);
+ }
+#endif
/* We need to set caps before adding */
gst_element_add_pad (GST_ELEMENT (demux),
}
/* Create buffer from pad */
- outbuf =
- gst_buffer_create_sub (buffer, 7 + codec_data,
- demux->tag_data_size - codec_data);
+ outbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
+ 7 + codec_data, demux->tag_data_size - codec_data);
if (demux->video_codec_tag == 7) {
guint8 avc_packet_type = GST_READ_UINT8 (data + 8);
GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET (outbuf) = demux->video_offset++;
GST_BUFFER_OFFSET_END (outbuf) = demux->video_offset;
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (demux->video_pad));
if (demux->duration == GST_CLOCK_TIME_NONE ||
demux->duration < GST_BUFFER_TIMESTAMP (outbuf))
demux->video_need_discont = FALSE;
}
- gst_segment_set_last_stop (&demux->segment, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (outbuf));
+ demux->segment.position = GST_BUFFER_TIMESTAMP (outbuf);
/* Do we need a newsegment event ? */
if (G_UNLIKELY (demux->video_need_segment)) {
- if (demux->close_seg_event)
- gst_pad_push_event (demux->video_pad,
- gst_event_ref (demux->close_seg_event));
-
if (!demux->new_seg_event) {
GST_DEBUG_OBJECT (demux, "pushing newsegment from %"
GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
- GST_TIME_ARGS (demux->segment.last_stop),
+ GST_TIME_ARGS (demux->segment.position),
GST_TIME_ARGS (demux->segment.stop));
- demux->new_seg_event =
- gst_event_new_new_segment (FALSE, demux->segment.rate,
- demux->segment.format, demux->segment.last_stop,
- demux->segment.stop, demux->segment.last_stop);
+ demux->segment.start = demux->segment.time = demux->segment.position;
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
} else {
GST_DEBUG_OBJECT (demux, "pushing pre-generated newsegment event");
}
demux->video_need_segment = FALSE;
}
- GST_LOG_OBJECT (demux, "pushing %d bytes buffer at pts %" GST_TIME_FORMAT
+ GST_LOG_OBJECT (demux,
+ "pushing %" G_GSIZE_FORMAT " bytes buffer at pts %" GST_TIME_FORMAT
" with duration %" GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT
- ", keyframe (%d)", GST_BUFFER_SIZE (outbuf),
+ ", keyframe (%d)", gst_buffer_get_size (outbuf),
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
keyframe);
ret = gst_pad_push (demux->video_pad, outbuf);
if (G_UNLIKELY (ret != GST_FLOW_OK)) {
- if (demux->segment.rate < 0.0 && ret == GST_FLOW_UNEXPECTED &&
- demux->segment.last_stop > demux->segment.stop) {
- /* In reverse playback we can get a GST_FLOW_UNEXPECTED when
+ if (demux->segment.rate < 0.0 && ret == GST_FLOW_EOS &&
+ demux->segment.position > demux->segment.stop) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
* we are at the end of the segment, so we just need to jump
* back to the previous section. */
GST_DEBUG_OBJECT (demux, "downstream has reached end of segment");
demux->video_linked = TRUE;
beach:
+ gst_buffer_unmap (buffer, &map);
return ret;
}
guint32 tag_data_size;
guint8 type;
gboolean keyframe = TRUE;
- GstClockTime ret;
- guint8 *data = GST_BUFFER_DATA (buffer);
+ GstClockTime ret = GST_CLOCK_TIME_NONE;
+ GstMapInfo map;
+ guint8 *data;
+ gsize size;
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 12,
+ GST_CLOCK_TIME_NONE);
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) >= 12, GST_CLOCK_TIME_NONE);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
type = data[0];
if (type != 9 && type != 8 && type != 18) {
GST_WARNING_OBJECT (demux, "Unsupported tag type %u", data[0]);
- return GST_CLOCK_TIME_NONE;
+ goto exit;
}
if (type == 9)
tag_data_size = GST_READ_UINT24_BE (data + 1);
- if (GST_BUFFER_SIZE (buffer) >= tag_data_size + 11 + 4) {
+ if (size >= tag_data_size + 11 + 4) {
if (GST_READ_UINT32_BE (data + tag_data_size + 11) != tag_data_size + 11) {
GST_WARNING_OBJECT (demux, "Invalid tag size");
- return GST_CLOCK_TIME_NONE;
+ goto exit;
}
}
if (demux->duration == GST_CLOCK_TIME_NONE || demux->duration < ret)
demux->duration = ret;
+exit:
+ gst_buffer_unmap (buffer, &map);
return ret;
}
{
GstFlowReturn ret = GST_FLOW_OK;
guint8 tag_type = 0;
- guint8 *data = GST_BUFFER_DATA (buffer);
+ GstMapInfo map;
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) >= 4, GST_FLOW_ERROR);
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 4, GST_FLOW_ERROR);
- tag_type = data[0];
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ tag_type = map.data[0];
switch (tag_type) {
case 9:
/* Tag size is 1 byte of type + 3 bytes of size + 7 bytes + tag data size +
* 4 bytes of previous tag size */
- demux->tag_data_size = GST_READ_UINT24_BE (data + 1);
+ demux->tag_data_size = GST_READ_UINT24_BE (map.data + 1);
demux->tag_size = demux->tag_data_size + 11;
GST_LOG_OBJECT (demux, "tag data size is %" G_GUINT64_FORMAT,
demux->tag_data_size);
+ gst_buffer_unmap (buffer, &map);
+
return ret;
}
gst_flv_demux_parse_header (GstFlvDemux * demux, GstBuffer * buffer)
{
GstFlowReturn ret = GST_FLOW_OK;
- guint8 *data = GST_BUFFER_DATA (buffer);
+ GstMapInfo map;
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) >= 9, GST_FLOW_ERROR);
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 9, GST_FLOW_ERROR);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
/* Check for the FLV tag */
- if (data[0] == 'F' && data[1] == 'L' && data[2] == 'V') {
+ if (map.data[0] == 'F' && map.data[1] == 'L' && map.data[2] == 'V') {
GST_DEBUG_OBJECT (demux, "FLV header detected");
} else {
if (G_UNLIKELY (demux->strict)) {
GST_WARNING_OBJECT (demux, "invalid header tag detected");
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto beach;
}
}
- /* Jump over the 4 first bytes */
- data += 4;
-
/* Now look at audio/video flags */
{
- guint8 flags = data[0];
+ guint8 flags = map.data[4];
demux->has_video = demux->has_audio = FALSE;
demux->need_header = FALSE;
beach:
+ gst_buffer_unmap (buffer, &map);
return ret;
}
demux->new_seg_event = NULL;
}
- if (demux->close_seg_event) {
- gst_event_unref (demux->close_seg_event);
- demux->close_seg_event = NULL;
- }
-
gst_adapter_clear (demux->adapter);
if (demux->audio_codec_data) {
}
static GstFlowReturn
-gst_flv_demux_chain (GstPad * pad, GstBuffer * buffer)
+gst_flv_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstFlowReturn ret = GST_FLOW_OK;
GstFlvDemux *demux = NULL;
- demux = GST_FLV_DEMUX (gst_pad_get_parent (pad));
+ demux = GST_FLV_DEMUX (parent);
- GST_LOG_OBJECT (demux, "received buffer of %d bytes at offset %"
- G_GUINT64_FORMAT, GST_BUFFER_SIZE (buffer), GST_BUFFER_OFFSET (buffer));
+ GST_LOG_OBJECT (demux,
+ "received buffer of %" G_GSIZE_FORMAT " bytes at offset %"
+ G_GUINT64_FORMAT, gst_buffer_get_size (buffer),
+ GST_BUFFER_OFFSET (buffer));
if (G_UNLIKELY (GST_BUFFER_OFFSET (buffer) == 0)) {
GST_DEBUG_OBJECT (demux, "beginning of file, expect header");
if (G_UNLIKELY (demux->flushing)) {
GST_DEBUG_OBJECT (demux, "we are now flushing, exiting parser loop");
- ret = GST_FLOW_WRONG_STATE;
+ ret = GST_FLOW_FLUSHING;
goto beach;
}
if (!demux->indexed) {
if (demux->offset == demux->file_size - sizeof (guint32)) {
- GstBuffer *buffer =
- gst_adapter_take_buffer (demux->adapter, sizeof (guint32));
- GstByteReader *reader = gst_byte_reader_new_from_buffer (buffer);
guint64 seek_offset;
+ guint8 *data;
- if (!gst_adapter_available (demux->adapter) >= sizeof (guint32)) {
- /* error */
- }
+ data = gst_adapter_take (demux->adapter, 4);
+ if (!data)
+ goto no_index;
- seek_offset =
- demux->file_size - sizeof (guint32) -
- gst_byte_reader_peek_uint32_be_unchecked (reader);
- gst_byte_reader_free (reader);
- gst_buffer_unref (buffer);
+ seek_offset = demux->file_size - sizeof (guint32) -
+ GST_READ_UINT32_BE (data);
+ g_free (data);
GST_INFO_OBJECT (demux,
"Seeking to beginning of last tag at %" G_GUINT64_FORMAT,
}
}
- gst_object_unref (demux);
-
return ret;
/* ERRORS */
return ret;
}
- if (G_UNLIKELY (*buffer && GST_BUFFER_SIZE (*buffer) != size)) {
+ if (G_UNLIKELY (*buffer && gst_buffer_get_size (*buffer) != size)) {
GST_WARNING_OBJECT (demux,
- "partial pull got %d when expecting %d from offset %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (*buffer), size, offset);
+ "partial pull got %" G_GSIZE_FORMAT " when expecting %d from offset %"
+ G_GUINT64_FORMAT, gst_buffer_get_size (*buffer), size, offset);
gst_buffer_unref (*buffer);
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
*buffer = NULL;
return ret;
}
static GstFlowReturn
gst_flv_demux_seek_to_prev_keyframe (GstFlvDemux * demux)
{
- GstFlowReturn ret = GST_FLOW_UNEXPECTED;
+ GstFlowReturn ret = GST_FLOW_EOS;
GstIndexEntry *entry = NULL;
GST_DEBUG_OBJECT (demux,
GST_FORMAT_BYTES, demux->from_offset - 1);
if (entry) {
- gint64 bytes, time;
+ gint64 bytes = 0, time = 0;
gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &bytes);
gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
return ret;
}
-static gboolean
-gst_flv_demux_push_src_event (GstFlvDemux * demux, GstEvent * event)
-{
- gboolean ret = TRUE;
-
- if (demux->audio_pad)
- ret |= gst_pad_push_event (demux->audio_pad, gst_event_ref (event));
-
- if (demux->video_pad)
- ret |= gst_pad_push_event (demux->video_pad, gst_event_ref (event));
-
- gst_event_unref (event);
-
- return ret;
-}
-
static GstFlowReturn
gst_flv_demux_create_index (GstFlvDemux * demux, gint64 pos, GstClockTime ts)
{
gint64 size;
- GstFormat fmt = GST_FORMAT_BYTES;
size_t tag_size;
guint64 old_offset;
GstBuffer *buffer;
GstClockTime tag_time;
GstFlowReturn ret = GST_FLOW_OK;
- if (G_UNLIKELY (!gst_pad_query_peer_duration (demux->sinkpad, &fmt, &size) ||
- fmt != GST_FORMAT_BYTES))
+ if (!gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &size))
return GST_FLOW_OK;
GST_DEBUG_OBJECT (demux, "building index at %" G_GINT64_FORMAT
demux->offset += tag_size;
}
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
/* file ran out, so mark we have complete index */
demux->indexed = TRUE;
ret = GST_FLOW_OK;
gst_flv_demux_get_metadata (GstFlvDemux * demux)
{
gint64 ret = 0, offset;
- GstFormat fmt = GST_FORMAT_BYTES;
size_t tag_size, size;
GstBuffer *buffer = NULL;
+ GstMapInfo map;
- if (G_UNLIKELY (!gst_pad_query_peer_duration (demux->sinkpad, &fmt, &offset)
- || fmt != GST_FORMAT_BYTES))
+ if (!gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &offset))
goto exit;
ret = offset;
4, &buffer))
goto exit;
- tag_size = GST_READ_UINT32_BE (GST_BUFFER_DATA (buffer));
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ tag_size = GST_READ_UINT32_BE (map.data);
+ gst_buffer_unmap (buffer, &map);
GST_DEBUG_OBJECT (demux, "last tag size: %" G_GSIZE_FORMAT, tag_size);
gst_buffer_unref (buffer);
buffer = NULL;
goto exit;
/* a consistency check */
- size = GST_READ_UINT24_BE (GST_BUFFER_DATA (buffer) + 1);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ size = GST_READ_UINT24_BE (map.data + 1);
if (size != tag_size - 11) {
+ gst_buffer_unmap (buffer, &map);
GST_DEBUG_OBJECT (demux,
"tag size %" G_GSIZE_FORMAT ", expected %" G_GSIZE_FORMAT
", corrupt or truncated file", size, tag_size - 11);
gst_flv_demux_parse_tag_timestamp (demux, FALSE, buffer, &size);
/* maybe get some more metadata */
- if (GST_BUFFER_DATA (buffer)[0] == 18) {
+ if (map.data[0] == 18) {
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
buffer = NULL;
GST_DEBUG_OBJECT (demux, "script tag, pulling it to parse");
if (GST_FLOW_OK == gst_flv_demux_pull_range (demux, demux->sinkpad, offset,
tag_size, &buffer))
gst_flv_demux_parse_tag_script (demux, buffer);
+ } else {
+ gst_buffer_unmap (buffer, &map);
}
exit:
demux->file_size = gst_flv_demux_get_metadata (demux);
break;
case FLV_STATE_DONE:
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
break;
case FLV_STATE_SEEK:
/* seek issued with insufficient index;
if (demux->segment.rate < 0.0) {
/* check end of section */
if ((gint64) demux->offset >= demux->to_offset ||
- demux->segment.last_stop >= demux->segment.stop + 2 * GST_SECOND ||
+ demux->segment.position >= demux->segment.stop + 2 * GST_SECOND ||
(demux->audio_done && demux->video_done))
ret = gst_flv_demux_seek_to_prev_keyframe (demux);
} else {
/* check EOS condition */
if ((demux->segment.stop != -1) &&
- (demux->segment.last_stop >= demux->segment.stop)) {
- ret = GST_FLOW_UNEXPECTED;
+ (demux->segment.position >= demux->segment.stop)) {
+ ret = GST_FLOW_EOS;
}
}
GST_LOG_OBJECT (demux, "pausing task, reason %s", reason);
gst_pad_pause_task (pad);
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
+ /* handle end-of-stream/segment */
+ /* so align our position with the end of it, if there is one
+ * this ensures a subsequent will arrive at correct base/acc time */
+ if (demux->segment.rate > 0.0 &&
+ GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
+ demux->segment.position = demux->segment.stop;
+ else if (demux->segment.rate < 0.0)
+ demux->segment.position = demux->segment.start;
+
/* perform EOS logic */
if (!demux->no_more_pads) {
gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
if (!gst_flv_demux_push_src_event (demux, gst_event_new_eos ()))
GST_WARNING_OBJECT (demux, "failed pushing EOS on streams");
}
- } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
GST_ELEMENT_ERROR (demux, STREAM, FAILED,
("Internal data stream error."),
("stream stopped, reason %s", reason));
g_return_val_if_fail (segment != NULL, 0);
- time = segment->last_stop;
+ time = segment->position;
if (demux->index) {
/* Let's check if we have an index entry for that seek time */
GST_DEBUG_OBJECT (demux, "found index entry for %" GST_TIME_FORMAT
" at %" GST_TIME_FORMAT ", seeking to %" G_GINT64_FORMAT,
- GST_TIME_ARGS (segment->last_stop), GST_TIME_ARGS (time), bytes);
+ GST_TIME_ARGS (segment->position), GST_TIME_ARGS (time), bytes);
/* Key frame seeking */
if (segment->flags & GST_SEEK_FLAG_KEY_UNIT) {
if (time < segment->start) {
segment->start = segment->time = time;
}
- segment->last_stop = time;
+ segment->position = time;
}
} else {
GST_DEBUG_OBJECT (demux, "no index entry found for %" GST_TIME_FORMAT,
&demux->segment);
/* Apply the seek to our segment */
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
start_type, start, stop_type, stop, &update);
GST_DEBUG_OBJECT (demux, "segment configured %" GST_SEGMENT_FORMAT,
&seeksegment);
- if (flush || seeksegment.last_stop != demux->segment.last_stop) {
+ if (flush || seeksegment.position != demux->segment.position) {
/* Do the actual seeking */
guint64 offset = gst_flv_demux_find_offset (demux, &seeksegment);
if (!demux->indexed) {
guint64 seek_offset = 0;
gboolean building_index;
- GstFormat fmt;
GST_OBJECT_LOCK (demux);
/* handle the seek in the chain function */
building_index = demux->building_index;
if (!building_index) {
demux->building_index = TRUE;
- fmt = GST_FORMAT_BYTES;
if (!demux->file_size
- && !gst_pad_query_peer_duration (demux->sinkpad, &fmt,
+ && !gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES,
&demux->file_size)) {
- GST_WARNING_OBJECT (demux,
- "Cannot obtain file size - %" G_GINT64_FORMAT ", format %u",
- demux->file_size, fmt);
+ GST_WARNING_OBJECT (demux, "Failed to query upstream file size");
GST_OBJECT_UNLOCK (demux);
return FALSE;
}
if (flush) {
/* Stop flushing upstream we need to pull */
- gst_pad_push_event (demux->sinkpad, gst_event_new_flush_stop ());
+ gst_pad_push_event (demux->sinkpad, gst_event_new_flush_stop (TRUE));
}
/* Work on a copy until we are sure the seek succeeded. */
&demux->segment);
/* Apply the seek to our segment */
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
start_type, start, stop_type, stop, &update);
GST_DEBUG_OBJECT (demux, "segment configured %" GST_SEGMENT_FORMAT,
&seeksegment);
- if (flush || seeksegment.last_stop != demux->segment.last_stop) {
+ if (flush || seeksegment.position != demux->segment.position) {
/* Do the actual seeking */
/* index is reliable if it is complete or we do not go to far ahead */
if (seeking && !demux->indexed &&
- seeksegment.last_stop > demux->index_max_time + 10 * GST_SECOND) {
+ seeksegment.position > demux->index_max_time + 10 * GST_SECOND) {
GST_DEBUG_OBJECT (demux, "delaying seek to post-scan; "
" index only up to %" GST_TIME_FORMAT,
GST_TIME_ARGS (demux->index_max_time));
/* stop flushing for now */
if (flush)
- gst_flv_demux_push_src_event (demux, gst_event_new_flush_stop ());
+ gst_flv_demux_push_src_event (demux, gst_event_new_flush_stop (TRUE));
/* delegate scanning and index building to task thread to avoid
* occupying main (UI) loop */
if (demux->seek_event)
gst_event_unref (demux->seek_event);
demux->seek_event = gst_event_ref (event);
- demux->seek_time = seeksegment.last_stop;
+ demux->seek_time = seeksegment.position;
demux->state = FLV_STATE_SEEK;
/* do not know about succes yet, but we did care and handled it */
ret = TRUE;
ret = TRUE;
}
- if (G_UNLIKELY (demux->close_seg_event)) {
- gst_event_unref (demux->close_seg_event);
- demux->close_seg_event = NULL;
- }
-
if (flush) {
/* Stop flushing, the sinks are at time 0 now */
- gst_flv_demux_push_src_event (demux, gst_event_new_flush_stop ());
- } else {
- GST_DEBUG_OBJECT (demux, "closing running segment %" GST_SEGMENT_FORMAT,
- &demux->segment);
-
- /* Close the current segment for a linear playback */
- if (demux->segment.rate >= 0) {
- /* for forward playback, we played from start to last_stop */
- demux->close_seg_event = gst_event_new_new_segment (TRUE,
- demux->segment.rate, demux->segment.format,
- demux->segment.start, demux->segment.last_stop, demux->segment.time);
- } else {
- gint64 stop;
-
- if ((stop = demux->segment.stop) == -1)
- stop = demux->segment.duration;
-
- /* for reverse playback, we played from stop to last_stop. */
- demux->close_seg_event = gst_event_new_new_segment (TRUE,
- demux->segment.rate, demux->segment.format,
- demux->segment.last_stop, stop, demux->segment.last_stop);
- }
+ gst_flv_demux_push_src_event (demux, gst_event_new_flush_stop (TRUE));
}
if (ret) {
if (demux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT (demux),
gst_message_new_segment_start (GST_OBJECT (demux),
- demux->segment.format, demux->segment.last_stop));
+ demux->segment.format, demux->segment.position));
}
/* Tell all the stream a new segment is needed */
GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
GST_TIME_ARGS (demux->segment.start),
GST_TIME_ARGS (demux->segment.stop));
- demux->new_seg_event =
- gst_event_new_new_segment (FALSE, demux->segment.rate,
- demux->segment.format, demux->segment.start,
- demux->segment.stop, demux->segment.start);
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
}
}
/* If we can pull that's prefered */
static gboolean
-gst_flv_demux_sink_activate (GstPad * sinkpad)
+gst_flv_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
{
- if (gst_pad_check_pull_range (sinkpad)) {
- return gst_pad_activate_pull (sinkpad, TRUE);
- } else {
- return gst_pad_activate_push (sinkpad, TRUE);
- }
-}
+ GstQuery *query;
+ gboolean pull_mode;
-/* This function gets called when we activate ourselves in push mode.
- * We cannot seek (ourselves) in the stream */
-static gboolean
-gst_flv_demux_sink_activate_push (GstPad * sinkpad, gboolean active)
-{
- GstFlvDemux *demux;
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
- demux = GST_FLV_DEMUX (gst_pad_get_parent (sinkpad));
+ pull_mode = gst_query_has_scheduling_mode (query, GST_PAD_MODE_PULL);
+ gst_query_unref (query);
- demux->random_access = FALSE;
+ if (!pull_mode)
+ goto activate_push;
- gst_object_unref (demux);
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
- return TRUE;
+activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+ }
}
-/* this function gets called when we activate ourselves in pull mode.
- * We can perform random access to the resource and we start a task
- * to start reading */
static gboolean
-gst_flv_demux_sink_activate_pull (GstPad * sinkpad, gboolean active)
+gst_flv_demux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
{
+ gboolean res;
GstFlvDemux *demux;
- demux = GST_FLV_DEMUX (gst_pad_get_parent (sinkpad));
+ demux = GST_FLV_DEMUX (parent);
- if (active) {
- demux->random_access = TRUE;
- gst_object_unref (demux);
- return gst_pad_start_task (sinkpad, (GstTaskFunction) gst_flv_demux_loop,
- sinkpad);
- } else {
- demux->random_access = FALSE;
- gst_object_unref (demux);
- return gst_pad_stop_task (sinkpad);
+ switch (mode) {
+ case GST_PAD_MODE_PUSH:
+ demux->random_access = FALSE;
+ res = TRUE;
+ break;
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ demux->random_access = TRUE;
+ res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_flv_demux_loop,
+ sinkpad);
+ } else {
+ demux->random_access = FALSE;
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
}
+ return res;
}
static gboolean
-gst_flv_demux_sink_event (GstPad * pad, GstEvent * event)
+gst_flv_demux_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstFlvDemux *demux;
gboolean ret = FALSE;
- demux = GST_FLV_DEMUX (gst_pad_get_parent (pad));
+ demux = GST_FLV_DEMUX (parent);
GST_DEBUG_OBJECT (demux, "handling event %s", GST_EVENT_TYPE_NAME (event));
GST_WARNING_OBJECT (demux, "failed pushing EOS on streams");
ret = TRUE;
break;
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate;
- gint64 start, stop, time;
- gboolean update;
+ GstSegment in_segment;
GST_DEBUG_OBJECT (demux, "received new segment");
- gst_event_parse_new_segment (event, &update, &rate, &format, &start,
- &stop, &time);
+ gst_event_copy_segment (event, &in_segment);
- if (format == GST_FORMAT_TIME) {
+ if (in_segment.format == GST_FORMAT_TIME) {
/* time segment, this is perfect, copy over the values. */
- gst_segment_set_newsegment (&demux->segment, update, rate, format,
- start, stop, time);
+ memcpy (&demux->segment, &in_segment, sizeof (in_segment));
GST_DEBUG_OBJECT (demux, "NEWSEGMENT: %" GST_SEGMENT_FORMAT,
&demux->segment);
break;
}
- gst_object_unref (demux);
-
return ret;
}
static gboolean
-gst_flv_demux_src_event (GstPad * pad, GstEvent * event)
+gst_flv_demux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstFlvDemux *demux;
gboolean ret = FALSE;
- demux = GST_FLV_DEMUX (gst_pad_get_parent (pad));
+ demux = GST_FLV_DEMUX (parent);
GST_DEBUG_OBJECT (demux, "handling event %s", GST_EVENT_TYPE_NAME (event));
break;
}
- gst_object_unref (demux);
-
return ret;
}
static gboolean
-gst_flv_demux_query (GstPad * pad, GstQuery * query)
+gst_flv_demux_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
gboolean res = TRUE;
GstFlvDemux *demux;
- demux = GST_FLV_DEMUX (gst_pad_get_parent (pad));
+ demux = GST_FLV_DEMUX (parent);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_DURATION:
}
GST_DEBUG_OBJECT (pad, "position query, replying %" GST_TIME_FORMAT,
- GST_TIME_ARGS (demux->segment.last_stop));
+ GST_TIME_ARGS (demux->segment.position));
- gst_query_set_position (query, GST_FORMAT_TIME, demux->segment.last_stop);
+ gst_query_set_position (query, GST_FORMAT_TIME, demux->segment.position);
break;
}
}
case GST_QUERY_LATENCY:
default:
- {
- GstPad *peer;
-
- if ((peer = gst_pad_get_peer (demux->sinkpad))) {
- /* query latency on peer pad */
- res = gst_pad_query (peer, query);
- gst_object_unref (peer);
- } else {
- /* no peer, we don't know */
- res = FALSE;
- }
+ res = gst_pad_query_default (pad, parent, query);
break;
- }
}
beach:
- gst_object_unref (demux);
return res;
}
if (G_UNLIKELY (!demux->index)) {
GST_DEBUG_OBJECT (demux, "no index provided creating our own");
- demux->index = gst_index_factory_make ("memindex");
+ demux->index = g_object_new (gst_mem_index_get_type (), NULL);
gst_index_get_writer_id (demux->index, GST_OBJECT (demux),
&demux->index_id);
return ret;
}
+#if 0
static void
gst_flv_demux_set_index (GstElement * element, GstIndex * index)
{
return result;
}
+#endif
static void
gst_flv_demux_dispose (GObject * object)
demux->new_seg_event = NULL;
}
- if (demux->close_seg_event) {
- gst_event_unref (demux->close_seg_event);
- demux->close_seg_event = NULL;
- }
-
if (demux->audio_codec_data) {
gst_buffer_unref (demux->audio_codec_data);
demux->audio_codec_data = NULL;
}
static void
-gst_flv_demux_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &flv_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &audio_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &video_src_template);
- gst_element_class_set_details_simple (element_class, "FLV Demuxer",
- "Codec/Demuxer",
- "Demux FLV feeds into digital streams",
- "Julien Moutte <julien@moutte.net>");
-}
-
-static void
gst_flv_demux_class_init (GstFlvDemuxClass * klass)
{
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_flv_demux_change_state);
+
+#if 0
gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_flv_demux_set_index);
gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_flv_demux_get_index);
+#endif
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&audio_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&video_src_template));
+ gst_element_class_set_details_simple (gstelement_class, "FLV Demuxer",
+ "Codec/Demuxer",
+ "Demux FLV feeds into digital streams",
+ "Julien Moutte <julien@moutte.net>");
}
static void
-gst_flv_demux_init (GstFlvDemux * demux, GstFlvDemuxClass * g_class)
+gst_flv_demux_init (GstFlvDemux * demux)
{
demux->sinkpad =
gst_pad_new_from_static_template (&flv_sink_template, "sink");
GST_DEBUG_FUNCPTR (gst_flv_demux_chain));
gst_pad_set_activate_function (demux->sinkpad,
GST_DEBUG_FUNCPTR (gst_flv_demux_sink_activate));
- gst_pad_set_activatepull_function (demux->sinkpad,
- GST_DEBUG_FUNCPTR (gst_flv_demux_sink_activate_pull));
- gst_pad_set_activatepush_function (demux->sinkpad,
- GST_DEBUG_FUNCPTR (gst_flv_demux_sink_activate_push));
+ gst_pad_set_activatemode_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_sink_activate_mode));
gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);
demux->adapter = gst_adapter_new ();
- demux->taglist = gst_tag_list_new ();
+ demux->taglist = gst_tag_list_new_empty ();
gst_segment_init (&demux->segment, GST_FORMAT_TIME);
demux->own_index = FALSE;
+ GST_OBJECT_FLAG_SET (demux, GST_ELEMENT_FLAG_INDEXABLE);
+
gst_flv_demux_cleanup (demux);
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
+#include "gstindex.h"
G_BEGIN_DECLS
#define GST_TYPE_FLV_DEMUX \
GstSegment segment;
- GstEvent *close_seg_event;
GstEvent *new_seg_event;
GstTagList *taglist;
#include <math.h>
#include <string.h>
+#include <gst/audio/audio.h>
+
#include "gstflvmux.h"
#include "amfdefs.h"
"audio/mpeg, mpegversion = (int) 2, framed = (boolean) TRUE; "
"audio/mpeg, mpegversion = (int) 4, stream-format = (string) raw, framed = (boolean) TRUE; "
"audio/x-nellymoser, channels = (int) { 1, 2 }, rate = (int) { 5512, 8000, 11025, 16000, 22050, 44100 }; "
- "audio/x-raw-int, endianness = (int) LITTLE_ENDIAN, channels = (int) { 1, 2 }, width = (int) 8, depth = (int) 8, rate = (int) { 5512, 11025, 22050, 44100 }, signed = (boolean) FALSE; "
- "audio/x-raw-int, endianness = (int) LITTLE_ENDIAN, channels = (int) { 1, 2 }, width = (int) 16, depth = (int) 16, rate = (int) { 5512, 11025, 22050, 44100 }, signed = (boolean) TRUE; "
+ "audio/x-raw, format = (string) { U8, S16LE}, layout = (string) interleaved, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
"audio/x-alaw, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
"audio/x-mulaw, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
"audio/x-speex, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 };")
);
-#define _do_init(type) \
- G_STMT_START{ \
- static const GInterfaceInfo tag_setter_info = { \
- NULL, \
- NULL, \
- NULL \
- }; \
- g_type_add_interface_static (type, GST_TYPE_TAG_SETTER, \
- &tag_setter_info); \
- }G_STMT_END
-
-GST_BOILERPLATE_FULL (GstFlvMux, gst_flv_mux, GstElement, GST_TYPE_ELEMENT,
- _do_init);
+#define gst_flv_mux_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstFlvMux, gst_flv_mux, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
static void gst_flv_mux_finalize (GObject * object);
static GstFlowReturn
gst_flv_mux_handle_sink_event (GstCollectPads2 * pads, GstCollectData2 * data,
GstEvent * event, gpointer user_data);
-static gboolean gst_flv_mux_handle_src_event (GstPad * pad, GstEvent * event);
+static gboolean gst_flv_mux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static GstPad *gst_flv_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps);
static void gst_flv_mux_release_pad (GstElement * element, GstPad * pad);
+static gboolean gst_flv_mux_video_pad_setcaps (GstPad * pad, GstCaps * caps);
+static gboolean gst_flv_mux_audio_pad_setcaps (GstPad * pad, GstCaps * caps);
+
static void gst_flv_mux_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_flv_mux_set_property (GObject * object,
g_slice_free (GstFlvMuxIndexEntry, entry);
}
+static GstBuffer *
+_gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
+{
+ GstBuffer *buf;
+
+ buf = gst_buffer_new ();
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (free_func ? 0 : GST_MEMORY_FLAG_READONLY,
+ mem, free_func, size, 0, size));
+
+ return buf;
+}
+
static void
-gst_flv_mux_base_init (gpointer g_class)
+_gst_buffer_new_and_alloc (gsize size, GstBuffer ** buffer, guint8 ** data)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &videosink_templ);
- gst_element_class_add_static_pad_template (element_class,
- &audiosink_templ);
- gst_element_class_add_static_pad_template (element_class, &src_templ);
- gst_element_class_set_details_simple (element_class, "FLV muxer",
- "Codec/Muxer",
- "Muxes video/audio streams into a FLV stream",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+ g_return_if_fail (data != NULL);
+ g_return_if_fail (buffer != NULL);
- GST_DEBUG_CATEGORY_INIT (flvmux_debug, "flvmux", 0, "FLV muxer");
+ *data = g_malloc (size);
+ *buffer = _gst_buffer_new_wrapped (*data, size, g_free);
}
static void
gstelement_class->request_new_pad =
GST_DEBUG_FUNCPTR (gst_flv_mux_request_new_pad);
gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_flv_mux_release_pad);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&videosink_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&audiosink_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_templ));
+ gst_element_class_set_details_simple (gstelement_class, "FLV muxer",
+ "Codec/Muxer",
+ "Muxes video/audio streams into a FLV stream",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ GST_DEBUG_CATEGORY_INIT (flvmux_debug, "flvmux", 0, "FLV muxer");
}
static void
-gst_flv_mux_init (GstFlvMux * mux, GstFlvMuxClass * g_class)
+gst_flv_mux_init (GstFlvMux * mux)
{
mux->srcpad = gst_pad_new_from_static_template (&src_templ, "src");
gst_pad_set_event_function (mux->srcpad, gst_flv_mux_handle_src_event);
}
static gboolean
-gst_flv_mux_handle_src_event (GstPad * pad, GstEvent * event)
+gst_flv_mux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstEventType type;
break;
}
- return gst_pad_event_default (pad, event);
+ return gst_pad_event_default (pad, parent, event);
}
static gboolean
GstEvent * event, gpointer user_data)
{
GstFlvMux *mux = GST_FLV_MUX (user_data);
+ gboolean ret = FALSE;
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+ GstFlvPad *flvpad;
+
+ gst_event_parse_caps (event, &caps);
+
+ /* find stream data */
+ flvpad = (GstFlvPad *) data;
+ g_assert (flvpad);
+
+ if (flvpad->video) {
+ ret = gst_flv_mux_video_pad_setcaps (data->pad, caps);
+ } else {
+ ret = gst_flv_mux_audio_pad_setcaps (data->pad, caps);
+ }
+ /* and eat */
+ gst_event_unref (event);
+ break;
+ }
case GST_EVENT_TAG:{
GstTagList *list;
GstTagSetter *setter = GST_TAG_SETTER (mux);
gst_event_parse_tag (event, &list);
gst_tag_setter_merge_tags (setter, list, mode);
mux->new_tags = TRUE;
+ ret = TRUE;
+ gst_event_unref (event);
break;
}
+ case GST_EVENT_EOS:
+ case GST_EVENT_SEGMENT:
+ gst_event_unref (event);
+ ret = TRUE;
+ break;
default:
+ ret = gst_pad_event_default (data->pad, GST_OBJECT (mux), event);
break;
}
- /* now GstCollectPads2 can take care of the rest, e.g. EOS */
- return FALSE;
+ return ret;
}
static gboolean
} else {
cpad->audio_codec = 6;
}
- } else if (strcmp (gst_structure_get_name (s), "audio/x-raw-int") == 0) {
- gint endianness;
+ } else if (strcmp (gst_structure_get_name (s), "audio/x-raw") == 0) {
+ GstAudioInfo info;
- if (gst_structure_get_int (s, "endianness", &endianness)
- && endianness == G_LITTLE_ENDIAN)
+ if (gst_audio_info_from_caps (&info, caps)) {
cpad->audio_codec = 3;
- else
+
+ if (GST_AUDIO_INFO_WIDTH (&info) == 8)
+ cpad->width = 0;
+ else if (GST_AUDIO_INFO_WIDTH (&info) == 16)
+ cpad->width = 1;
+ else
+ ret = FALSE;
+ } else
ret = FALSE;
} else if (strcmp (gst_structure_get_name (s), "audio/x-alaw") == 0) {
cpad->audio_codec = 7;
}
if (ret) {
- gint rate, channels, width;
+ gint rate, channels;
if (gst_structure_get_int (s, "rate", &rate)) {
if (cpad->audio_codec == 10)
ret = FALSE;
}
- if (gst_structure_get_int (s, "width", &width)) {
- if (cpad->audio_codec != 3)
- cpad->width = 1;
- else if (width == 8)
- cpad->width = 0;
- else if (width == 16)
- cpad->width = 1;
- else
- ret = FALSE;
- } else if (cpad->audio_codec != 3) {
+ if (cpad->audio_codec != 3)
cpad->width = 1;
- } else {
- ret = FALSE;
- }
}
if (ret && gst_structure_has_field (s, "codec_data")) {
static GstPad *
gst_flv_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * pad_name)
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
GstFlvMux *mux = GST_FLV_MUX (element);
GstFlvPad *cpad;
GstPad *pad = NULL;
const gchar *name = NULL;
- GstPadSetCapsFunction setcapsfunc = NULL;
gboolean video;
if (mux->state != GST_FLV_MUX_STATE_HEADER) {
mux->have_audio = TRUE;
name = "audio";
video = FALSE;
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_flv_mux_audio_pad_setcaps);
} else if (templ == gst_element_class_get_pad_template (klass, "video")) {
if (mux->have_video) {
GST_WARNING_OBJECT (mux, "Already have a video pad");
mux->have_video = TRUE;
name = "video";
video = TRUE;
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_flv_mux_video_pad_setcaps);
} else {
GST_WARNING_OBJECT (mux, "Invalid template");
return NULL;
cpad->video_codec_data = NULL;
gst_flv_mux_reset_pad (mux, cpad, video);
- gst_pad_set_setcaps_function (pad, setcapsfunc);
gst_pad_set_active (pad, TRUE);
gst_element_add_pad (element, pad);
static GstFlowReturn
gst_flv_mux_push (GstFlvMux * mux, GstBuffer * buffer)
{
- buffer = gst_buffer_make_metadata_writable (buffer);
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (mux->srcpad));
/* pushing the buffer that rewrites the header will make it no longer be the
* total output size in bytes, but it doesn't matter at that point */
- mux->byte_count += GST_BUFFER_SIZE (buffer);
+ mux->byte_count += gst_buffer_get_size (buffer);
return gst_pad_push (mux->srcpad, buffer);
}
GstBuffer *header;
guint8 *data;
- header = gst_buffer_new_and_alloc (9 + 4);
- data = GST_BUFFER_DATA (header);
+ _gst_buffer_new_and_alloc (9 + 4, &header, &data);
data[0] = 'F';
data[1] = 'L';
GST_DEBUG_OBJECT (mux, "preallocating %d bytes for the index",
preallocate_size);
- tmp = gst_buffer_new_and_alloc (preallocate_size);
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (preallocate_size, &tmp, &data);
/* prefill the space with a gstfiller: <spaces> script tag variable */
GST_WRITE_UINT16_BE (data, 9); /* 9 characters */
static GstBuffer *
gst_flv_mux_create_number_script_value (const gchar * name, gdouble value)
{
- GstBuffer *tmp = gst_buffer_new_and_alloc (2 + strlen (name) + 1 + 8);
- guint8 *data = GST_BUFFER_DATA (tmp);
+ GstBuffer *tmp;
+ guint8 *data;
gsize len = strlen (name);
+ _gst_buffer_new_and_alloc (2 + len + 1 + 8, &tmp, &data);
+
GST_WRITE_UINT16_BE (data, len);
data += 2; /* name length */
memcpy (data, name, len);
{
const GstTagList *tags;
GstBuffer *script_tag, *tmp;
+ GstMapInfo map;
guint8 *data;
gint i, n_tags, tags_written = 0;
GST_DEBUG_OBJECT (mux, "tags = %" GST_PTR_FORMAT, tags);
- script_tag = gst_buffer_new_and_alloc (11);
- data = GST_BUFFER_DATA (script_tag);
+ /* FIXME perhaps some bytewriter'ing here ... */
+
+ _gst_buffer_new_and_alloc (11, &script_tag, &data);
data[0] = 18;
/* Stream ID */
data[8] = data[9] = data[10] = 0;
- tmp = gst_buffer_new_and_alloc (13);
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (13, &tmp, &data);
data[0] = AMF0_STRING_MARKER; /* string */
data[1] = 0;
data[2] = 10; /* length 10 */
script_tag = gst_buffer_join (script_tag, tmp);
n_tags = (tags) ? gst_structure_n_fields ((GstStructure *) tags) : 0;
- tmp = gst_buffer_new_and_alloc (5);
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (5, &tmp, &data);
data[0] = 8; /* ECMA array */
GST_WRITE_UINT32_BE (data + 1, n_tags);
script_tag = gst_buffer_join (script_tag, tmp);
if (!gst_tag_list_get_string (tags, tag_name, &s))
continue;
- tmp = gst_buffer_new_and_alloc (2 + strlen (t) + 1 + 2 + strlen (s));
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (2 + strlen (t) + 1 + 2 + strlen (s),
+ &tmp, &data);
data[0] = 0; /* tag name length */
data[1] = strlen (t);
memcpy (&data[2], t, strlen (t));
if (mux->duration == GST_CLOCK_TIME_NONE) {
GSList *l;
-
- GstFormat fmt = GST_FORMAT_TIME;
guint64 dur;
for (l = mux->collect->data; l; l = l->next) {
GstCollectData2 *cdata = l->data;
- fmt = GST_FORMAT_TIME;
-
- if (gst_pad_query_peer_duration (cdata->pad, &fmt, (gint64 *) & dur) &&
- fmt == GST_FORMAT_TIME && dur != GST_CLOCK_TIME_NONE) {
+ if (gst_pad_peer_query_duration (cdata->pad, GST_FORMAT_TIME,
+ (gint64 *) & dur) && dur != GST_CLOCK_TIME_NONE) {
if (mux->duration == GST_CLOCK_TIME_NONE)
mux->duration = dur;
else
if (!mux->streamable && mux->duration != GST_CLOCK_TIME_NONE) {
gdouble d;
+ GstMapInfo map;
+
d = gst_guint64_to_gdouble (mux->duration);
d /= (gdouble) GST_SECOND;
GST_DEBUG_OBJECT (mux, "determined the duration to be %f", d);
- data = GST_BUFFER_DATA (script_tag);
- GST_WRITE_DOUBLE_BE (data + 29 + 2 + 8 + 1, d);
+ gst_buffer_map (script_tag, &map, GST_MAP_WRITE);
+ GST_WRITE_DOUBLE_BE (map.data + 29 + 2 + 8 + 1, d);
+ gst_buffer_unmap (script_tag, &map);
}
if (mux->have_video) {
}
}
- if (video_pad && GST_PAD_CAPS (video_pad)) {
- GstStructure *s = gst_caps_get_structure (GST_PAD_CAPS (video_pad), 0);
+ if (video_pad && gst_pad_has_current_caps (video_pad)) {
+ GstCaps *caps;
+ GstStructure *s;
gint size;
gint num, den;
script_tag = gst_buffer_join (script_tag, tmp);
tags_written++;
+ caps = gst_pad_get_current_caps (video_pad);
+ s = gst_caps_get_structure (caps, 0);
+ gst_caps_unref (caps);
+
if (gst_structure_get_int (s, "width", &size)) {
GST_DEBUG_OBJECT (mux, "putting width %d in the metadata", size);
{
const gchar *s = "GStreamer FLV muxer";
- tmp = gst_buffer_new_and_alloc (2 + 15 + 1 + 2 + strlen (s));
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (2 + 15 + 1 + 2 + strlen (s), &tmp, &data);
data[0] = 0; /* 15 bytes name */
data[1] = 15;
memcpy (&data[2], "metadatacreator", 15);
months[tm->tm_mon], tm->tm_mday, tm->tm_hour, tm->tm_min, tm->tm_sec,
tm->tm_year + 1900);
- tmp = gst_buffer_new_and_alloc (2 + 12 + 1 + 2 + strlen (s));
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (2 + 12 + 1 + 2 + strlen (s), &tmp, &data);
data[0] = 0; /* 12 bytes name */
data[1] = 12;
memcpy (&data[2], "creationdate", 12);
goto exit;
}
- tmp = gst_buffer_new_and_alloc (2 + 0 + 1);
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (2 + 0 + 1, &tmp, &data);
data[0] = 0; /* 0 byte size */
data[1] = 0;
data[2] = 9; /* end marker */
script_tag = gst_buffer_join (script_tag, tmp);
tags_written++;
- tmp = gst_buffer_new_and_alloc (4);
- data = GST_BUFFER_DATA (tmp);
- GST_WRITE_UINT32_BE (data, GST_BUFFER_SIZE (script_tag));
+ _gst_buffer_new_and_alloc (4, &tmp, &data);
+ GST_WRITE_UINT32_BE (data, gst_buffer_get_size (script_tag));
script_tag = gst_buffer_join (script_tag, tmp);
- data = GST_BUFFER_DATA (script_tag);
- data[1] = ((GST_BUFFER_SIZE (script_tag) - 11 - 4) >> 16) & 0xff;
- data[2] = ((GST_BUFFER_SIZE (script_tag) - 11 - 4) >> 8) & 0xff;
- data[3] = ((GST_BUFFER_SIZE (script_tag) - 11 - 4) >> 0) & 0xff;
+ gst_buffer_map (script_tag, &map, GST_MAP_WRITE);
+ map.data[1] = ((gst_buffer_get_size (script_tag) - 11 - 4) >> 16) & 0xff;
+ map.data[2] = ((gst_buffer_get_size (script_tag) - 11 - 4) >> 8) & 0xff;
+ map.data[3] = ((gst_buffer_get_size (script_tag) - 11 - 4) >> 0) & 0xff;
- GST_WRITE_UINT32_BE (data + 11 + 13 + 1, tags_written);
+ GST_WRITE_UINT32_BE (map.data + 11 + 13 + 1, tags_written);
+ gst_buffer_unmap (script_tag, &map);
exit:
return script_tag;
GstFlvPad * cpad, gboolean is_codec_data)
{
GstBuffer *tag;
- guint8 *data;
+ GstMapInfo map;
guint size;
guint32 timestamp =
(GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) ? GST_BUFFER_TIMESTAMP (buffer) /
GST_MSECOND : cpad->last_timestamp / GST_MSECOND;
+ guint8 *data, *bdata;
+ gsize bsize;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ bdata = map.data;
+ bsize = map.size;
size = 11;
if (cpad->video) {
size += 1;
if (cpad->video_codec == 7)
- size += 4 + GST_BUFFER_SIZE (buffer);
+ size += 4 + bsize;
else
- size += GST_BUFFER_SIZE (buffer);
+ size += bsize;
} else {
size += 1;
if (cpad->audio_codec == 10)
- size += 1 + GST_BUFFER_SIZE (buffer);
+ size += 1 + bsize;
else
- size += GST_BUFFER_SIZE (buffer);
+ size += bsize;
}
size += 4;
- tag = gst_buffer_new_and_alloc (size);
+ _gst_buffer_new_and_alloc (size, &tag, &data);
GST_BUFFER_TIMESTAMP (tag) = timestamp * GST_MSECOND;
- data = GST_BUFFER_DATA (tag);
memset (data, 0, size);
data[0] = (cpad->video) ? 9 : 8;
/* FIXME: what to do about composition time */
data[13] = data[14] = data[15] = 0;
- memcpy (data + 11 + 1 + 4, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ memcpy (data + 11 + 1 + 4, bdata, bsize);
} else {
- memcpy (data + 11 + 1, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ memcpy (data + 11 + 1, bdata, bsize);
}
} else {
data[11] |= (cpad->audio_codec << 4) & 0xf0;
if (cpad->audio_codec == 10) {
data[12] = is_codec_data ? 0 : 1;
- memcpy (data + 11 + 1 + 1, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ memcpy (data + 11 + 1 + 1, bdata, bsize);
} else {
- memcpy (data + 11 + 1, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ memcpy (data + 11 + 1, bdata, bsize);
}
}
+ gst_buffer_unmap (buffer, &map);
+
GST_WRITE_UINT32_BE (data + size - 4, size - 4);
- gst_buffer_copy_metadata (tag, buffer, GST_BUFFER_COPY_TIMESTAMPS);
+ GST_BUFFER_TIMESTAMP (tag) = GST_BUFFER_TIMESTAMP (buffer);
+ GST_BUFFER_DURATION (tag) = GST_BUFFER_DURATION (buffer);
+ GST_BUFFER_OFFSET (tag) = GST_BUFFER_OFFSET (buffer);
+ GST_BUFFER_OFFSET_END (tag) = GST_BUFFER_OFFSET_END (buffer);
+
/* mark the buffer if it's an audio buffer and there's also video being muxed
* or it's a video interframe */
if ((mux->have_video && !cpad->video) ||
}
/* mark buffers that will go in the streamheader */
- GST_BUFFER_FLAG_SET (header, GST_BUFFER_FLAG_IN_CAPS);
- GST_BUFFER_FLAG_SET (metadata, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (header, GST_BUFFER_FLAG_HEADER);
+ GST_BUFFER_FLAG_SET (metadata, GST_BUFFER_FLAG_HEADER);
if (video_codec_data != NULL) {
- GST_BUFFER_FLAG_SET (video_codec_data, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (video_codec_data, GST_BUFFER_FLAG_HEADER);
/* mark as a delta unit, so downstream will not try to synchronize on that
* buffer - to actually start playback you need a real video keyframe */
GST_BUFFER_FLAG_SET (video_codec_data, GST_BUFFER_FLAG_DELTA_UNIT);
}
if (audio_codec_data != NULL) {
- GST_BUFFER_FLAG_SET (audio_codec_data, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (audio_codec_data, GST_BUFFER_FLAG_HEADER);
}
/* put buffers in streamheader */
gst_flv_mux_put_buffer_in_streamheader (&streamheader, audio_codec_data);
/* create the caps and put the streamheader in them */
- caps = gst_caps_new_simple ("video/x-flv", NULL);
- caps = gst_caps_make_writable (caps);
+ caps = gst_caps_new_empty_simple ("video/x-flv");
structure = gst_caps_get_structure (caps, 0);
gst_structure_set_value (structure, "streamheader", &streamheader);
g_value_unset (&streamheader);
- if (GST_PAD_CAPS (mux->srcpad) == NULL)
+ if (!gst_pad_has_current_caps (mux->srcpad))
gst_pad_set_caps (mux->srcpad, caps);
gst_caps_unref (caps);
GList *l;
guint32 index_len, allocate_size;
guint32 i, index_skip;
+ GstSegment segment;
GstClockTime dur;
if (mux->streamable)
return GST_FLOW_OK;
/* seek back to the preallocated index space */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- 13 + 29, GST_CLOCK_TIME_NONE, 13 + 29);
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = segment.time = 13 + 29;
+ event = gst_event_new_segment (&segment);
if (!gst_pad_push_event (mux->srcpad, event)) {
GST_WARNING_OBJECT (mux, "Seek to rewrite header failed");
return GST_FLOW_OK;
/* see size calculation in gst_flv_mux_preallocate_index */
allocate_size = 11 + 8 + 22 + 10 + index_len * 18;
GST_DEBUG_OBJECT (mux, "Allocating %d bytes for index", allocate_size);
- index = gst_buffer_new_and_alloc (allocate_size);
- data = GST_BUFFER_DATA (index);
+ _gst_buffer_new_and_alloc (allocate_size, &index, &data);
GST_WRITE_UINT16_BE (data, 9); /* the 'keyframes' key */
memcpy (data + 2, "keyframes", 9);
guint8 *data;
guint32 remaining_filler_size;
- tmp = gst_buffer_new_and_alloc (14);
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (14, &tmp, &data);
GST_WRITE_UINT16_BE (data, 9);
memcpy (data + 2, "gstfiller", 9);
GST_WRITE_UINT8 (data + 11, 2); /* string */
rewrite = gst_buffer_join (rewrite, index);
- gst_buffer_set_caps (rewrite, GST_PAD_CAPS (mux->srcpad));
return gst_flv_mux_push (mux, rewrite);
}
GstFlowReturn ret;
if (mux->state == GST_FLV_MUX_STATE_HEADER) {
+ GstSegment segment;
+
if (mux->collect->data == NULL) {
GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
("No input streams configured"));
return GST_FLOW_ERROR;
}
- if (gst_pad_push_event (mux->srcpad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0)))
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ if (gst_pad_push_event (mux->srcpad, gst_event_new_segment (&segment)))
ret = gst_flv_mux_write_header (mux);
else
ret = GST_FLOW_ERROR;
} else {
gst_flv_mux_rewrite_header (mux);
gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
}
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2001 RidgeRun (http://www.ridgerun.com/)
+ * Written by Erik Walthinsen <omega@ridgerun.com>
+ *
+ * gstindex.c: Index for mappings and other data
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:gstindex
+ * @short_description: Generate indexes on objects
+ * @see_also: #GstIndexFactory
+ *
+ * GstIndex is used to generate a stream index of one or more elements
+ * in a pipeline.
+ *
+ * Elements will overload the set_index and get_index virtual methods in
+ * #GstElement. When streaming data, the element will add index entries if it
+ * has an index set.
+ *
+ * Each element that adds to the index will do that using a writer_id. The
+ * writer_id is obtained from gst_index_get_writer_id().
+ *
+ * The application that wants to index the stream will create a new index object
+ * using gst_index_new() or gst_index_factory_make(). The index is assigned to a
+ * specific element, a bin or the whole pipeline. This will cause indexable
+ * elements to add entires to the index while playing.
+ */
+
+/* FIXME: complete gobject annotations */
+/* FIXME-0.11: cleanup API
+ * - no one seems to use GstIndexGroup, GstIndexCertainty
+ *
+ * - the API for application to use the index is mostly missing
+ * - apps need to get a list of writers
+ * - apps need to be able to iterate over each writers index entry collection
+ * - gst_index_get_assoc_entry() should pass ownership
+ * - the GstIndexEntry structure is large and contains repetitive information
+ * - we want to allow Indexers to implement a saner storage and create
+ * GstIndexEntries on demand (the app has to free them), might even make
+ * sense to ask the app to provide a ptr and fill it.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+/* Index signals and args */
+enum
+{
+ ENTRY_ADDED,
+ LAST_SIGNAL
+};
+
+enum
+{
+ ARG_0,
+ ARG_RESOLVER
+ /* FILL ME */
+};
+
+#if 0
+GST_DEBUG_CATEGORY_STATIC (index_debug);
+#define GST_CAT_DEFAULT index_debug
+#endif
+
+static void gst_index_finalize (GObject * object);
+
+static void gst_index_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_index_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstIndexGroup *gst_index_group_new (guint groupnum);
+static void gst_index_group_free (GstIndexGroup * group);
+
+static gboolean gst_index_path_resolver (GstIndex * index, GstObject * writer,
+ gchar ** writer_string, gpointer data);
+static gboolean gst_index_gtype_resolver (GstIndex * index, GstObject * writer,
+ gchar ** writer_string, gpointer data);
+static void gst_index_add_entry (GstIndex * index, GstIndexEntry * entry);
+
+static guint gst_index_signals[LAST_SIGNAL] = { 0 };
+
+typedef struct
+{
+ GstIndexResolverMethod method;
+ GstIndexResolver resolver;
+ gpointer user_data;
+}
+ResolverEntry;
+
+static const ResolverEntry resolvers[] = {
+ {GST_INDEX_RESOLVER_CUSTOM, NULL, NULL},
+ {GST_INDEX_RESOLVER_GTYPE, gst_index_gtype_resolver, NULL},
+ {GST_INDEX_RESOLVER_PATH, gst_index_path_resolver, NULL},
+};
+
+#define GST_TYPE_INDEX_RESOLVER (gst_index_resolver_get_type())
+static GType
+gst_index_resolver_get_type (void)
+{
+ static GType index_resolver_type = 0;
+ static const GEnumValue index_resolver[] = {
+ {GST_INDEX_RESOLVER_CUSTOM, "GST_INDEX_RESOLVER_CUSTOM", "custom"},
+ {GST_INDEX_RESOLVER_GTYPE, "GST_INDEX_RESOLVER_GTYPE", "gtype"},
+ {GST_INDEX_RESOLVER_PATH, "GST_INDEX_RESOLVER_PATH", "path"},
+ {0, NULL, NULL},
+ };
+
+ if (!index_resolver_type) {
+ index_resolver_type =
+ g_enum_register_static ("GstFlvDemuxIndexResolver", index_resolver);
+ }
+ return index_resolver_type;
+}
+
+GType
+gst_index_entry_get_type (void)
+{
+ static GType index_entry_type = 0;
+
+ if (!index_entry_type) {
+ index_entry_type = g_boxed_type_register_static ("GstFlvDemuxIndexEntry",
+ (GBoxedCopyFunc) gst_index_entry_copy,
+ (GBoxedFreeFunc) gst_index_entry_free);
+ }
+ return index_entry_type;
+}
+
+#if 0
+#define _do_init \
+{ \
+ GST_DEBUG_CATEGORY_INIT (index_debug, "GST_INDEX", GST_DEBUG_BOLD, \
+ "Generic indexing support"); \
+}
+#endif
+
+typedef GstIndex GstFlvDemuxIndex;
+typedef GstIndexClass GstFlvDemuxIndexClass;
+//typedef GstIndexEntry GstFlvDemuxIndexEntry;
+G_DEFINE_TYPE (GstFlvDemuxIndex, gst_index, GST_TYPE_OBJECT);
+
+static void
+gst_index_class_init (GstIndexClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+ /**
+ * GstIndex::entry-added
+ * @gstindex: the object which received the signal.
+ * @arg1: The entry added to the index.
+ *
+ * Is emitted when a new entry is added to the index.
+ */
+ gst_index_signals[ENTRY_ADDED] =
+ g_signal_new ("entry-added", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET (GstIndexClass, entry_added), NULL, NULL,
+ gst_marshal_VOID__BOXED, G_TYPE_NONE, 1, GST_TYPE_INDEX_ENTRY);
+
+ gobject_class->set_property = gst_index_set_property;
+ gobject_class->get_property = gst_index_get_property;
+ gobject_class->finalize = gst_index_finalize;
+
+ g_object_class_install_property (gobject_class, ARG_RESOLVER,
+ g_param_spec_enum ("resolver", "Resolver",
+ "Select a predefined object to string mapper",
+ GST_TYPE_INDEX_RESOLVER, GST_INDEX_RESOLVER_PATH,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_index_init (GstIndex * index)
+{
+ index->curgroup = gst_index_group_new (0);
+ index->maxgroup = 0;
+ index->groups = g_list_prepend (NULL, index->curgroup);
+
+ index->writers = g_hash_table_new (NULL, NULL);
+ index->last_id = 0;
+
+ index->method = GST_INDEX_RESOLVER_PATH;
+ index->resolver = resolvers[index->method].resolver;
+ index->resolver_user_data = resolvers[index->method].user_data;
+
+ GST_OBJECT_FLAG_SET (index, GST_INDEX_WRITABLE);
+ GST_OBJECT_FLAG_SET (index, GST_INDEX_READABLE);
+
+ GST_DEBUG ("created new index");
+}
+
+static void
+gst_index_free_writer (gpointer key, gpointer value, gpointer user_data)
+{
+ GstIndexEntry *entry = (GstIndexEntry *) value;
+
+ if (entry) {
+ gst_index_entry_free (entry);
+ }
+}
+
+static void
+gst_index_finalize (GObject * object)
+{
+ GstIndex *index = GST_INDEX (object);
+
+ if (index->groups) {
+ g_list_foreach (index->groups, (GFunc) gst_index_group_free, NULL);
+ g_list_free (index->groups);
+ index->groups = NULL;
+ }
+
+ if (index->writers) {
+ g_hash_table_foreach (index->writers, gst_index_free_writer, NULL);
+ g_hash_table_destroy (index->writers);
+ index->writers = NULL;
+ }
+
+ if (index->filter_user_data && index->filter_user_data_destroy)
+ index->filter_user_data_destroy (index->filter_user_data);
+
+ if (index->resolver_user_data && index->resolver_user_data_destroy)
+ index->resolver_user_data_destroy (index->resolver_user_data);
+
+ G_OBJECT_CLASS (gst_index_parent_class)->finalize (object);
+}
+
+static void
+gst_index_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstIndex *index;
+
+ index = GST_INDEX (object);
+
+ switch (prop_id) {
+ case ARG_RESOLVER:
+ index->method = (GstIndexResolverMethod) g_value_get_enum (value);
+ index->resolver = resolvers[index->method].resolver;
+ index->resolver_user_data = resolvers[index->method].user_data;
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_index_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstIndex *index;
+
+ index = GST_INDEX (object);
+
+ switch (prop_id) {
+ case ARG_RESOLVER:
+ g_value_set_enum (value, index->method);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static GstIndexGroup *
+gst_index_group_new (guint groupnum)
+{
+ GstIndexGroup *indexgroup = g_slice_new (GstIndexGroup);
+
+ indexgroup->groupnum = groupnum;
+ indexgroup->entries = NULL;
+ indexgroup->certainty = GST_INDEX_UNKNOWN;
+ indexgroup->peergroup = -1;
+
+ GST_DEBUG ("created new index group %d", groupnum);
+
+ return indexgroup;
+}
+
+static void
+gst_index_group_free (GstIndexGroup * group)
+{
+ g_slice_free (GstIndexGroup, group);
+}
+
+/* do not resurrect this, add a derived dummy index class instead */
+#if 0
+/**
+ * gst_index_new:
+ *
+ * Create a new dummy index object. Use gst_element_set_index() to assign that
+ * to an element or pipeline. This index is not storing anything, but will
+ * still emit e.g. the #GstIndex::entry-added signal.
+ *
+ * Returns: (transfer full): a new index object
+ */
+GstIndex *
+gst_index_new (void)
+{
+ GstIndex *index;
+
+ index = g_object_newv (gst_index_get_type (), 0, NULL);
+
+ return index;
+}
+#endif
+
+/**
+ * gst_index_commit:
+ * @index: the index to commit
+ * @id: the writer that commited the index
+ *
+ * Tell the index that the writer with the given id is done
+ * with this index and is not going to write any more entries
+ * to it.
+ */
+void
+gst_index_commit (GstIndex * index, gint id)
+{
+ GstIndexClass *iclass;
+
+ iclass = GST_INDEX_GET_CLASS (index);
+
+ if (iclass->commit)
+ iclass->commit (index, id);
+}
+
+#if 0
+/**
+ * gst_index_get_group:
+ * @index: the index to get the current group from
+ *
+ * Get the id of the current group.
+ *
+ * Returns: the id of the current group.
+ */
+gint
+gst_index_get_group (GstIndex * index)
+{
+ return index->curgroup->groupnum;
+}
+
+/**
+ * gst_index_new_group:
+ * @index: the index to create the new group in
+ *
+ * Create a new group for the given index. It will be
+ * set as the current group.
+ *
+ * Returns: the id of the newly created group.
+ */
+gint
+gst_index_new_group (GstIndex * index)
+{
+ index->curgroup = gst_index_group_new (++index->maxgroup);
+ index->groups = g_list_append (index->groups, index->curgroup);
+ GST_DEBUG ("created new group %d in index", index->maxgroup);
+ return index->maxgroup;
+}
+
+/**
+ * gst_index_set_group:
+ * @index: the index to set the new group in
+ * @groupnum: the groupnumber to set
+ *
+ * Set the current groupnumber to the given argument.
+ *
+ * Returns: TRUE if the operation succeeded, FALSE if the group
+ * did not exist.
+ */
+gboolean
+gst_index_set_group (GstIndex * index, gint groupnum)
+{
+ GList *list;
+ GstIndexGroup *indexgroup;
+
+ /* first check for null change */
+ if (groupnum == index->curgroup->groupnum)
+ return TRUE;
+
+ /* else search for the proper group */
+ list = index->groups;
+ while (list) {
+ indexgroup = (GstIndexGroup *) (list->data);
+ list = g_list_next (list);
+ if (indexgroup->groupnum == groupnum) {
+ index->curgroup = indexgroup;
+ GST_DEBUG ("switched to index group %d", indexgroup->groupnum);
+ return TRUE;
+ }
+ }
+
+ /* couldn't find the group in question */
+ GST_DEBUG ("couldn't find index group %d", groupnum);
+ return FALSE;
+}
+#endif
+
+#if 0
+/**
+ * gst_index_set_certainty:
+ * @index: the index to set the certainty on
+ * @certainty: the certainty to set
+ *
+ * Set the certainty of the given index.
+ */
+void
+gst_index_set_certainty (GstIndex * index, GstIndexCertainty certainty)
+{
+ index->curgroup->certainty = certainty;
+}
+
+/**
+ * gst_index_get_certainty:
+ * @index: the index to get the certainty of
+ *
+ * Get the certainty of the given index.
+ *
+ * Returns: the certainty of the index.
+ */
+GstIndexCertainty
+gst_index_get_certainty (GstIndex * index)
+{
+ return index->curgroup->certainty;
+}
+#endif
+
+#if 0
+/**
+ * gst_index_set_filter:
+ * @index: the index to register the filter on
+ * @filter: the filter to register
+ * @user_data: data passed to the filter function
+ *
+ * Lets the app register a custom filter function so that
+ * it can select what entries should be stored in the index.
+ */
+void
+gst_index_set_filter (GstIndex * index,
+ GstIndexFilter filter, gpointer user_data)
+{
+ g_return_if_fail (GST_IS_INDEX (index));
+
+ gst_index_set_filter_full (index, filter, user_data, NULL);
+}
+
+/**
+ * gst_index_set_filter_full:
+ * @index: the index to register the filter on
+ * @filter: the filter to register
+ * @user_data: data passed to the filter function
+ * @user_data_destroy: function to call when @user_data is unset
+ *
+ * Lets the app register a custom filter function so that
+ * it can select what entries should be stored in the index.
+ */
+void
+gst_index_set_filter_full (GstIndex * index,
+ GstIndexFilter filter, gpointer user_data, GDestroyNotify user_data_destroy)
+{
+ g_return_if_fail (GST_IS_INDEX (index));
+
+ if (index->filter_user_data && index->filter_user_data_destroy)
+ index->filter_user_data_destroy (index->filter_user_data);
+
+ index->filter = filter;
+ index->filter_user_data = user_data;
+ index->filter_user_data_destroy = user_data_destroy;
+}
+
+/**
+ * gst_index_set_resolver:
+ * @index: the index to register the resolver on
+ * @resolver: the resolver to register
+ * @user_data: data passed to the resolver function
+ *
+ * Lets the app register a custom function to map index
+ * ids to writer descriptions.
+ */
+void
+gst_index_set_resolver (GstIndex * index,
+ GstIndexResolver resolver, gpointer user_data)
+{
+ gst_index_set_resolver_full (index, resolver, user_data, NULL);
+}
+
+/**
+ * gst_index_set_resolver_full:
+ * @index: the index to register the resolver on
+ * @resolver: the resolver to register
+ * @user_data: data passed to the resolver function
+ * @user_data_destroy: destroy function for @user_data
+ *
+ * Lets the app register a custom function to map index
+ * ids to writer descriptions.
+ *
+ * Since: 0.10.18
+ */
+void
+gst_index_set_resolver_full (GstIndex * index, GstIndexResolver resolver,
+ gpointer user_data, GDestroyNotify user_data_destroy)
+{
+ g_return_if_fail (GST_IS_INDEX (index));
+
+ if (index->resolver_user_data && index->resolver_user_data_destroy)
+ index->resolver_user_data_destroy (index->resolver_user_data);
+
+ index->resolver = resolver;
+ index->resolver_user_data = user_data;
+ index->resolver_user_data_destroy = user_data_destroy;
+ index->method = GST_INDEX_RESOLVER_CUSTOM;
+}
+#endif
+
+/**
+ * gst_index_entry_copy:
+ * @entry: the entry to copy
+ *
+ * Copies an entry and returns the result.
+ *
+ * Free-function: gst_index_entry_free
+ *
+ * Returns: (transfer full): a newly allocated #GstIndexEntry.
+ */
+GstIndexEntry *
+gst_index_entry_copy (GstIndexEntry * entry)
+{
+ GstIndexEntry *new_entry = g_slice_new (GstIndexEntry);
+
+ memcpy (new_entry, entry, sizeof (GstIndexEntry));
+ return new_entry;
+}
+
+/**
+ * gst_index_entry_free:
+ * @entry: (transfer full): the entry to free
+ *
+ * Free the memory used by the given entry.
+ */
+void
+gst_index_entry_free (GstIndexEntry * entry)
+{
+ switch (entry->type) {
+ case GST_INDEX_ENTRY_ID:
+ if (entry->data.id.description) {
+ g_free (entry->data.id.description);
+ entry->data.id.description = NULL;
+ }
+ break;
+ case GST_INDEX_ENTRY_ASSOCIATION:
+ if (entry->data.assoc.assocs) {
+ g_free (entry->data.assoc.assocs);
+ entry->data.assoc.assocs = NULL;
+ }
+ break;
+ case GST_INDEX_ENTRY_OBJECT:
+ break;
+ case GST_INDEX_ENTRY_FORMAT:
+ break;
+ }
+
+ g_slice_free (GstIndexEntry, entry);
+}
+
+#if 0
+/**
+ * gst_index_add_format:
+ * @index: the index to add the entry to
+ * @id: the id of the index writer
+ * @format: the format to add to the index
+ *
+ * Adds a format entry into the index. This function is
+ * used to map dynamic GstFormat ids to their original
+ * format key.
+ *
+ * Free-function: gst_index_entry_free
+ *
+ * Returns: (transfer full): a pointer to the newly added entry in the index.
+ */
+GstIndexEntry *
+gst_index_add_format (GstIndex * index, gint id, GstFormat format)
+{
+ GstIndexEntry *entry;
+ const GstFormatDefinition *def;
+
+ g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+ g_return_val_if_fail (format != 0, NULL);
+
+ if (!GST_INDEX_IS_WRITABLE (index) || id == -1)
+ return NULL;
+
+ entry = g_slice_new (GstIndexEntry);
+ entry->type = GST_INDEX_ENTRY_FORMAT;
+ entry->id = id;
+ entry->data.format.format = format;
+
+ def = gst_format_get_details (format);
+ entry->data.format.key = def->nick;
+
+ gst_index_add_entry (index, entry);
+
+ return entry;
+}
+#endif
+
+/**
+ * gst_index_add_id:
+ * @index: the index to add the entry to
+ * @id: the id of the index writer
+ * @description: the description of the index writer
+ *
+ * Add an id entry into the index.
+ *
+ * Returns: a pointer to the newly added entry in the index.
+ */
+GstIndexEntry *
+gst_index_add_id (GstIndex * index, gint id, gchar * description)
+{
+ GstIndexEntry *entry;
+
+ g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+ g_return_val_if_fail (description != NULL, NULL);
+
+ if (!GST_INDEX_IS_WRITABLE (index) || id == -1)
+ return NULL;
+
+ entry = g_slice_new (GstIndexEntry);
+ entry->type = GST_INDEX_ENTRY_ID;
+ entry->id = id;
+ entry->data.id.description = description;
+
+ gst_index_add_entry (index, entry);
+
+ return entry;
+}
+
+static gboolean
+gst_index_path_resolver (GstIndex * index, GstObject * writer,
+ gchar ** writer_string, gpointer data)
+{
+ *writer_string = gst_object_get_path_string (writer);
+
+ return TRUE;
+}
+
+static gboolean
+gst_index_gtype_resolver (GstIndex * index, GstObject * writer,
+ gchar ** writer_string, gpointer data)
+{
+ g_return_val_if_fail (writer != NULL, FALSE);
+
+ if (GST_IS_PAD (writer)) {
+ GstObject *element = gst_object_get_parent (GST_OBJECT (writer));
+ gchar *name;
+
+ name = gst_object_get_name (writer);
+ if (element) {
+ *writer_string = g_strdup_printf ("%s.%s",
+ G_OBJECT_TYPE_NAME (element), name);
+ gst_object_unref (element);
+ } else {
+ *writer_string = name;
+ name = NULL;
+ }
+
+ g_free (name);
+
+ } else {
+ *writer_string = g_strdup (G_OBJECT_TYPE_NAME (writer));
+ }
+
+ return TRUE;
+}
+
+/**
+ * gst_index_get_writer_id:
+ * @index: the index to get a unique write id for
+ * @writer: the GstObject to allocate an id for
+ * @id: a pointer to a gint to hold the id
+ *
+ * Before entries can be added to the index, a writer
+ * should obtain a unique id. The methods to add new entries
+ * to the index require this id as an argument.
+ *
+ * The application can implement a custom function to map the writer object
+ * to a string. That string will be used to register or look up an id
+ * in the index.
+ *
+ * <note>
+ * The caller must not hold @writer's #GST_OBJECT_LOCK, as the default
+ * resolver may call functions that take the object lock as well, and
+ * the lock is not recursive.
+ * </note>
+ *
+ * Returns: TRUE if the writer would be mapped to an id.
+ */
+gboolean
+gst_index_get_writer_id (GstIndex * index, GstObject * writer, gint * id)
+{
+ gchar *writer_string = NULL;
+ GstIndexEntry *entry;
+ GstIndexClass *iclass;
+ gboolean success = FALSE;
+
+ g_return_val_if_fail (GST_IS_INDEX (index), FALSE);
+ g_return_val_if_fail (GST_IS_OBJECT (writer), FALSE);
+ g_return_val_if_fail (id, FALSE);
+
+ *id = -1;
+
+ /* first try to get a previously cached id */
+ entry = g_hash_table_lookup (index->writers, writer);
+ if (entry == NULL) {
+
+ iclass = GST_INDEX_GET_CLASS (index);
+
+ /* let the app make a string */
+ if (index->resolver) {
+ gboolean res;
+
+ res =
+ index->resolver (index, writer, &writer_string,
+ index->resolver_user_data);
+ if (!res)
+ return FALSE;
+ } else {
+ g_warning ("no resolver found");
+ return FALSE;
+ }
+
+ /* if the index has a resolver, make it map this string to an id */
+ if (iclass->get_writer_id) {
+ success = iclass->get_writer_id (index, id, writer_string);
+ }
+ /* if the index could not resolve, we allocate one ourselves */
+ if (!success) {
+ *id = ++index->last_id;
+ }
+
+ entry = gst_index_add_id (index, *id, writer_string);
+ if (!entry) {
+ /* index is probably not writable, make an entry anyway
+ * to keep it in our cache */
+ entry = g_slice_new (GstIndexEntry);
+ entry->type = GST_INDEX_ENTRY_ID;
+ entry->id = *id;
+ entry->data.id.description = writer_string;
+ }
+ g_hash_table_insert (index->writers, writer, entry);
+ } else {
+ *id = entry->id;
+ }
+
+ return TRUE;
+}
+
+static void
+gst_index_add_entry (GstIndex * index, GstIndexEntry * entry)
+{
+ GstIndexClass *iclass;
+
+ iclass = GST_INDEX_GET_CLASS (index);
+
+ if (iclass->add_entry) {
+ iclass->add_entry (index, entry);
+ }
+
+ g_signal_emit (index, gst_index_signals[ENTRY_ADDED], 0, entry);
+}
+
+/**
+ * gst_index_add_associationv:
+ * @index: the index to add the entry to
+ * @id: the id of the index writer
+ * @flags: optinal flags for this entry
+ * @n: number of associations
+ * @list: list of associations
+ *
+ * Associate given format/value pairs with each other.
+ *
+ * Returns: a pointer to the newly added entry in the index.
+ */
+GstIndexEntry *
+gst_index_add_associationv (GstIndex * index, gint id,
+ GstIndexAssociationFlags flags, gint n, const GstIndexAssociation * list)
+{
+ GstIndexEntry *entry;
+
+ g_return_val_if_fail (n > 0, NULL);
+ g_return_val_if_fail (list != NULL, NULL);
+ g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+
+ if (!GST_INDEX_IS_WRITABLE (index) || id == -1)
+ return NULL;
+
+ entry = g_slice_new (GstIndexEntry);
+
+ entry->type = GST_INDEX_ENTRY_ASSOCIATION;
+ entry->id = id;
+ entry->data.assoc.flags = flags;
+ entry->data.assoc.assocs = g_memdup (list, sizeof (GstIndexAssociation) * n);
+ entry->data.assoc.nassocs = n;
+
+ gst_index_add_entry (index, entry);
+
+ return entry;
+}
+
+#if 0
+/**
+ * gst_index_add_association:
+ * @index: the index to add the entry to
+ * @id: the id of the index writer
+ * @flags: optinal flags for this entry
+ * @format: the format of the value
+ * @value: the value
+ * @...: other format/value pairs or 0 to end the list
+ *
+ * Associate given format/value pairs with each other.
+ * Be sure to pass gint64 values to this functions varargs,
+ * you might want to use a gint64 cast to be sure.
+ *
+ * Returns: a pointer to the newly added entry in the index.
+ */
+GstIndexEntry *
+gst_index_add_association (GstIndex * index, gint id,
+ GstIndexAssociationFlags flags, GstFormat format, gint64 value, ...)
+{
+ va_list args;
+ GstIndexEntry *entry;
+ GstIndexAssociation *list;
+ gint n_assocs = 0;
+ GstFormat cur_format;
+ GArray *array;
+
+ g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+ g_return_val_if_fail (format != 0, NULL);
+
+ if (!GST_INDEX_IS_WRITABLE (index) || id == -1)
+ return NULL;
+
+ array = g_array_new (FALSE, FALSE, sizeof (GstIndexAssociation));
+
+ {
+ GstIndexAssociation a;
+
+ a.format = format;
+ a.value = value;
+ n_assocs = 1;
+ g_array_append_val (array, a);
+ }
+
+ va_start (args, value);
+
+ while ((cur_format = va_arg (args, GstFormat))) {
+ GstIndexAssociation a;
+
+ a.format = cur_format;
+ a.value = va_arg (args, gint64);
+ n_assocs++;
+ g_array_append_val (array, a);
+ }
+
+ va_end (args);
+
+ list = (GstIndexAssociation *) g_array_free (array, FALSE);
+
+ entry = gst_index_add_associationv (index, id, flags, n_assocs, list);
+ g_free (list);
+
+ return entry;
+}
+
+/**
+ * gst_index_add_object:
+ * @index: the index to add the object to
+ * @id: the id of the index writer
+ * @key: a key for the object
+ * @type: the GType of the object
+ * @object: a pointer to the object to add
+ *
+ * Add the given object to the index with the given key.
+ *
+ * This function is not yet implemented.
+ *
+ * Returns: a pointer to the newly added entry in the index.
+ */
+GstIndexEntry *
+gst_index_add_object (GstIndex * index, gint id, gchar * key,
+ GType type, gpointer object)
+{
+ if (!GST_INDEX_IS_WRITABLE (index) || id == -1)
+ return NULL;
+
+ return NULL;
+}
+#endif
+
+static gint
+gst_index_compare_func (gconstpointer a, gconstpointer b, gpointer user_data)
+{
+ if (a < b)
+ return -1;
+ if (a > b)
+ return 1;
+ return 0;
+}
+
+/**
+ * gst_index_get_assoc_entry:
+ * @index: the index to search
+ * @id: the id of the index writer
+ * @method: The lookup method to use
+ * @flags: Flags for the entry
+ * @format: the format of the value
+ * @value: the value to find
+ *
+ * Finds the given format/value in the index
+ *
+ * Returns: the entry associated with the value or NULL if the
+ * value was not found.
+ */
+GstIndexEntry *
+gst_index_get_assoc_entry (GstIndex * index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value)
+{
+ g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+
+ if (id == -1)
+ return NULL;
+
+ return gst_index_get_assoc_entry_full (index, id, method, flags, format,
+ value, gst_index_compare_func, NULL);
+}
+
+/**
+ * gst_index_get_assoc_entry_full:
+ * @index: the index to search
+ * @id: the id of the index writer
+ * @method: The lookup method to use
+ * @flags: Flags for the entry
+ * @format: the format of the value
+ * @value: the value to find
+ * @func: the function used to compare entries
+ * @user_data: user data passed to the compare function
+ *
+ * Finds the given format/value in the index with the given
+ * compare function and user_data.
+ *
+ * Returns: the entry associated with the value or NULL if the
+ * value was not found.
+ */
+GstIndexEntry *
+gst_index_get_assoc_entry_full (GstIndex * index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value, GCompareDataFunc func, gpointer user_data)
+{
+ GstIndexClass *iclass;
+
+ g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+
+ if (id == -1)
+ return NULL;
+
+ iclass = GST_INDEX_GET_CLASS (index);
+
+ if (iclass->get_assoc_entry)
+ return iclass->get_assoc_entry (index, id, method, flags, format, value,
+ func, user_data);
+
+ return NULL;
+}
+
+/**
+ * gst_index_entry_assoc_map:
+ * @entry: the index to search
+ * @format: the format of the value the find
+ * @value: a pointer to store the value
+ *
+ * Gets alternative formats associated with the indexentry.
+ *
+ * Returns: TRUE if there was a value associated with the given
+ * format.
+ */
+gboolean
+gst_index_entry_assoc_map (GstIndexEntry * entry,
+ GstFormat format, gint64 * value)
+{
+ gint i;
+
+ g_return_val_if_fail (entry != NULL, FALSE);
+ g_return_val_if_fail (value != NULL, FALSE);
+
+ for (i = 0; i < GST_INDEX_NASSOCS (entry); i++) {
+ if (GST_INDEX_ASSOC_FORMAT (entry, i) == format) {
+ *value = GST_INDEX_ASSOC_VALUE (entry, i);
+ return TRUE;
+ }
+ }
+ return FALSE;
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000 Wim Taymans <wim.taymans@chello.be>
+ *
+ * gstindex.h: Header for GstIndex, base class to handle efficient
+ * storage or caching of seeking information.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_INDEX_H__
+#define __GST_INDEX_H__
+
+#include <gst/gstobject.h>
+#include <gst/gstformat.h>
+#include <gst/gstpluginfeature.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_INDEX (gst_index_get_type ())
+#define GST_INDEX(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_INDEX, GstIndex))
+#define GST_IS_INDEX(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_INDEX))
+#define GST_INDEX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_INDEX, GstIndexClass))
+#define GST_IS_INDEX_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_INDEX))
+#define GST_INDEX_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_INDEX, GstIndexClass))
+
+#define GST_TYPE_INDEX_ENTRY (gst_index_entry_get_type())
+
+typedef struct _GstIndexEntry GstIndexEntry;
+typedef struct _GstIndexGroup GstIndexGroup;
+typedef struct _GstIndex GstIndex;
+typedef struct _GstIndexClass GstIndexClass;
+
+/**
+ * GstIndexCertainty:
+ * @GST_INDEX_UNKNOWN: accuracy is not known
+ * @GST_INDEX_CERTAIN: accuracy is perfect
+ * @GST_INDEX_FUZZY: accuracy is fuzzy
+ *
+ * The certainty of a group in the index.
+ */
+typedef enum {
+ GST_INDEX_UNKNOWN,
+ GST_INDEX_CERTAIN,
+ GST_INDEX_FUZZY
+} GstIndexCertainty;
+
+/**
+ * GstIndexEntryType:
+ * @GST_INDEX_ENTRY_ID: This entry is an id that maps an index id to its owner object
+ * @GST_INDEX_ENTRY_ASSOCIATION: This entry is an association between formats
+ * @GST_INDEX_ENTRY_OBJECT: An object
+ * @GST_INDEX_ENTRY_FORMAT: A format definition
+ *
+ * The different types of entries in the index.
+ */
+typedef enum {
+ GST_INDEX_ENTRY_ID,
+ GST_INDEX_ENTRY_ASSOCIATION,
+ GST_INDEX_ENTRY_OBJECT,
+ GST_INDEX_ENTRY_FORMAT
+} GstIndexEntryType;
+
+/**
+ * GstIndexLookupMethod:
+ * @GST_INDEX_LOOKUP_EXACT: There has to be an exact indexentry with the given format/value
+ * @GST_INDEX_LOOKUP_BEFORE: The exact entry or the one before it
+ * @GST_INDEX_LOOKUP_AFTER: The exact entry or the one after it
+ *
+ * Specify the method to find an index entry in the index.
+ */
+typedef enum {
+ GST_INDEX_LOOKUP_EXACT,
+ GST_INDEX_LOOKUP_BEFORE,
+ GST_INDEX_LOOKUP_AFTER
+} GstIndexLookupMethod;
+
+/**
+ * GST_INDEX_NASSOCS:
+ * @entry: The entry to query
+ *
+ * Get the number of associations in the entry.
+ */
+#define GST_INDEX_NASSOCS(entry) ((entry)->data.assoc.nassocs)
+
+/**
+ * GST_INDEX_ASSOC_FLAGS:
+ * @entry: The entry to query
+ *
+ * Get the flags for this entry.
+ */
+#define GST_INDEX_ASSOC_FLAGS(entry) ((entry)->data.assoc.flags)
+
+/**
+ * GST_INDEX_ASSOC_FORMAT:
+ * @entry: The entry to query
+ * @i: The format index
+ *
+ * Get the i-th format of the entry.
+ */
+#define GST_INDEX_ASSOC_FORMAT(entry,i) ((entry)->data.assoc.assocs[(i)].format)
+
+/**
+ * GST_INDEX_ASSOC_VALUE:
+ * @entry: The entry to query
+ * @i: The value index
+ *
+ * Get the i-th value of the entry.
+ */
+#define GST_INDEX_ASSOC_VALUE(entry,i) ((entry)->data.assoc.assocs[(i)].value)
+
+typedef struct _GstIndexAssociation GstIndexAssociation;
+
+/**
+ * GstIndexAssociation:
+ * @format: the format of the association
+ * @value: the value of the association
+ *
+ * An association in an entry.
+ */
+struct _GstIndexAssociation {
+ GstFormat format;
+ gint64 value;
+};
+
+/**
+ * GstIndexAssociationFlags:
+ * @GST_INDEX_ASSOCIATION_FLAG_NONE: no extra flags
+ * @GST_INDEX_ASSOCIATION_FLAG_KEY_UNIT: the entry marks a key unit, a key unit is one
+ * that marks a place where one can randomly seek to.
+ * @GST_INDEX_ASSOCIATION_FLAG_DELTA_UNIT: the entry marks a delta unit, a delta unit
+ * is one that marks a place where one can relatively seek to.
+ * @GST_INDEX_ASSOCIATION_FLAG_LAST: extra user defined flags should start here.
+ *
+ * Flags for an association entry.
+ */
+typedef enum {
+ GST_INDEX_ASSOCIATION_FLAG_NONE = 0,
+ GST_INDEX_ASSOCIATION_FLAG_KEY_UNIT = (1 << 0),
+ GST_INDEX_ASSOCIATION_FLAG_DELTA_UNIT = (1 << 1),
+
+ /* new flags should start here */
+ GST_INDEX_ASSOCIATION_FLAG_LAST = (1 << 8)
+} GstIndexAssociationFlags;
+
+/**
+ * GST_INDEX_FORMAT_FORMAT:
+ * @entry: The entry to query
+ *
+ * Get the format of the format entry
+ */
+#define GST_INDEX_FORMAT_FORMAT(entry) ((entry)->data.format.format)
+
+/**
+ * GST_INDEX_FORMAT_KEY:
+ * @entry: The entry to query
+ *
+ * Get the key of the format entry
+ */
+#define GST_INDEX_FORMAT_KEY(entry) ((entry)->data.format.key)
+
+/**
+ * GST_INDEX_ID_INVALID:
+ *
+ * Constant for an invalid index id
+ */
+#define GST_INDEX_ID_INVALID (-1)
+
+/**
+ * GST_INDEX_ID_DESCRIPTION:
+ * @entry: The entry to query
+ *
+ * Get the description of the id entry
+ */
+#define GST_INDEX_ID_DESCRIPTION(entry) ((entry)->data.id.description)
+
+/**
+ * GstIndexEntry:
+ *
+ * The basic element of an index.
+ */
+struct _GstIndexEntry {
+ /*< private >*/
+ GstIndexEntryType type;
+ gint id;
+
+ union {
+ struct {
+ gchar *description;
+ } id;
+ struct {
+ gint nassocs;
+ GstIndexAssociation
+ *assocs;
+ GstIndexAssociationFlags flags;
+ } assoc;
+ struct {
+ gchar *key;
+ GType type;
+ gpointer object;
+ } object;
+ struct {
+ GstFormat format;
+ const gchar *key;
+ } format;
+ } data;
+};
+
+/**
+ * GstIndexGroup:
+ *
+ * A group of related entries in an index.
+ */
+
+struct _GstIndexGroup {
+ /*< private >*/
+ /* unique ID of group in index */
+ gint groupnum;
+
+ /* list of entries */
+ GList *entries;
+
+ /* the certainty level of the group */
+ GstIndexCertainty certainty;
+
+ /* peer group that contains more certain entries */
+ gint peergroup;
+};
+
+/**
+ * GstIndexFilter:
+ * @index: The index being queried
+ * @entry: The entry to be added.
+ * @user_data: User data passed to the function.
+ *
+ * Function to filter out entries in the index.
+ *
+ * Returns: This function should return %TRUE if the entry is to be added
+ * to the index, %FALSE otherwise.
+ *
+ */
+typedef gboolean (*GstIndexFilter) (GstIndex *index,
+ GstIndexEntry *entry,
+ gpointer user_data);
+/**
+ * GstIndexResolverMethod:
+ * @GST_INDEX_RESOLVER_CUSTOM: Use a custom resolver
+ * @GST_INDEX_RESOLVER_GTYPE: Resolve based on the GType of the object
+ * @GST_INDEX_RESOLVER_PATH: Resolve on the path in graph
+ *
+ * The method used to resolve index writers
+ */
+typedef enum {
+ GST_INDEX_RESOLVER_CUSTOM,
+ GST_INDEX_RESOLVER_GTYPE,
+ GST_INDEX_RESOLVER_PATH
+} GstIndexResolverMethod;
+
+/**
+ * GstIndexResolver:
+ * @index: the index being queried.
+ * @writer: The object that wants to write
+ * @writer_string: A description of the writer.
+ * @user_data: user_data as registered
+ *
+ * Function to resolve ids to writer descriptions.
+ *
+ * Returns: %TRUE if an id could be assigned to the writer.
+ */
+typedef gboolean (*GstIndexResolver) (GstIndex *index,
+ GstObject *writer,
+ gchar **writer_string,
+ gpointer user_data);
+
+/**
+ * GstIndexFlags:
+ * @GST_INDEX_WRITABLE: The index is writable
+ * @GST_INDEX_READABLE: The index is readable
+ * @GST_INDEX_FLAG_LAST: First flag that can be used by subclasses
+ *
+ * Flags for this index
+ */
+typedef enum {
+ GST_INDEX_WRITABLE = (GST_OBJECT_FLAG_LAST << 0),
+ GST_INDEX_READABLE = (GST_OBJECT_FLAG_LAST << 1),
+
+ GST_INDEX_FLAG_LAST = (GST_OBJECT_FLAG_LAST << 8)
+} GstIndexFlags;
+
+/**
+ * GST_INDEX_IS_READABLE:
+ * @obj: The index to check
+ *
+ * Check if the index can be read from
+ */
+#define GST_INDEX_IS_READABLE(obj) (GST_OBJECT_FLAG_IS_SET (obj, GST_INDEX_READABLE))
+
+/**
+ * GST_INDEX_IS_WRITABLE:
+ * @obj: The index to check
+ *
+ * Check if the index can be written to
+ */
+#define GST_INDEX_IS_WRITABLE(obj) (GST_OBJECT_FLAG_IS_SET (obj, GST_INDEX_WRITABLE))
+
+/**
+ * GstIndex:
+ *
+ * Opaque #GstIndex structure.
+ */
+struct _GstIndex {
+ GstObject object;
+
+ /*< private >*/
+ GList *groups;
+ GstIndexGroup *curgroup;
+ gint maxgroup;
+
+ GstIndexResolverMethod method;
+ GstIndexResolver resolver;
+ gpointer resolver_user_data;
+ GDestroyNotify resolver_user_data_destroy;
+
+ GstIndexFilter filter;
+ gpointer filter_user_data;
+ GDestroyNotify filter_user_data_destroy;
+
+ GHashTable *writers;
+ gint last_id;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+struct _GstIndexClass {
+ GstObjectClass parent_class;
+
+ /*< protected >*/
+ gboolean (*get_writer_id) (GstIndex *index, gint *id, gchar *writer);
+
+ void (*commit) (GstIndex *index, gint id);
+
+ /* abstract methods */
+ void (*add_entry) (GstIndex *index, GstIndexEntry *entry);
+
+ GstIndexEntry* (*get_assoc_entry) (GstIndex *index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value,
+ GCompareDataFunc func,
+ gpointer user_data);
+ /* signals */
+ void (*entry_added) (GstIndex *index, GstIndexEntry *entry);
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+static
+GType gst_index_get_type (void);
+
+#if 0
+GstIndex* gst_index_new (void);
+#endif
+void gst_index_commit (GstIndex *index, gint id);
+
+#if 0
+gint gst_index_get_group (GstIndex *index);
+gint gst_index_new_group (GstIndex *index);
+gboolean gst_index_set_group (GstIndex *index, gint groupnum);
+
+void gst_index_set_certainty (GstIndex *index,
+ GstIndexCertainty certainty);
+GstIndexCertainty gst_index_get_certainty (GstIndex *index);
+
+static
+void gst_index_set_filter (GstIndex *index,
+ GstIndexFilter filter, gpointer user_data);
+static
+void gst_index_set_filter_full (GstIndex *index,
+ GstIndexFilter filter, gpointer user_data,
+ GDestroyNotify user_data_destroy);
+
+void gst_index_set_resolver (GstIndex *index,
+ GstIndexResolver resolver, gpointer user_data);
+void gst_index_set_resolver_full (GstIndex *index, GstIndexResolver resolver,
+ gpointer user_data,
+ GDestroyNotify user_data_destroy);
+#endif
+
+static
+gboolean gst_index_get_writer_id (GstIndex *index, GstObject *writer, gint *id);
+
+#if 0
+GstIndexEntry* gst_index_add_format (GstIndex *index, gint id, GstFormat format);
+#endif
+
+static
+GstIndexEntry* gst_index_add_associationv (GstIndex * index, gint id, GstIndexAssociationFlags flags,
+ gint n, const GstIndexAssociation * list);
+#if 0
+GstIndexEntry* gst_index_add_association (GstIndex *index, gint id, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value, ...)
+GstIndexEntry* gst_index_add_object (GstIndex *index, gint id, gchar *key,
+ GType type, gpointer object);
+#endif
+
+static
+GstIndexEntry* gst_index_add_id (GstIndex *index, gint id,
+ gchar *description);
+
+static
+GstIndexEntry* gst_index_get_assoc_entry (GstIndex *index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value);
+static
+GstIndexEntry* gst_index_get_assoc_entry_full (GstIndex *index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value,
+ GCompareDataFunc func,
+ gpointer user_data);
+
+/* working with index entries */
+static
+GType gst_index_entry_get_type (void);
+static
+GstIndexEntry * gst_index_entry_copy (GstIndexEntry *entry);
+static
+void gst_index_entry_free (GstIndexEntry *entry);
+static
+gboolean gst_index_entry_assoc_map (GstIndexEntry *entry,
+ GstFormat format, gint64 *value);
+
+G_END_DECLS
+
+#endif /* __GST_INDEX_H__ */
--- /dev/null
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <gst/gst.h>
+
+#define GST_TYPE_MEM_INDEX \
+ (gst_index_get_type ())
+#define GST_MEM_INDEX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MEM_INDEX, GstMemIndex))
+#define GST_MEM_INDEX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_MEM_INDEX, GstMemIndexClass))
+#define GST_IS_MEM_INDEX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MEM_INDEX))
+#define GST_IS_MEM_INDEX_CLASS(klass) \
+ (GST_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_MEM_INDEX))
+
+/*
+ * Object model:
+ *
+ * All entries are simply added to a GList first. Then we build
+ * an index to each entry for each id/format
+ *
+ *
+ * memindex
+ * -----------------------------...
+ * ! !
+ * id1 id2
+ * ------------
+ * ! !
+ * format1 format2
+ * ! !
+ * GTree GTree
+ *
+ *
+ * The memindex creates a MemIndexId object for each writer id, a
+ * Hashtable is kept to map the id to the MemIndexId
+ *
+ * The MemIndexId keeps a MemIndexFormatIndex for each format the
+ * specific writer wants indexed.
+ *
+ * The MemIndexFormatIndex keeps all the values of the particular
+ * format in a GTree, The values of the GTree point back to the entry.
+ *
+ * Finding a value for an id/format requires locating the correct GTree,
+ * then do a lookup in the Tree to get the required value.
+ */
+
+typedef struct
+{
+ GstFormat format;
+ gint offset;
+ GTree *tree;
+}
+GstMemIndexFormatIndex;
+
+typedef struct
+{
+ gint id;
+ GHashTable *format_index;
+}
+GstMemIndexId;
+
+typedef struct _GstMemIndex GstMemIndex;
+typedef struct _GstMemIndexClass GstMemIndexClass;
+
+struct _GstMemIndex
+{
+ GstIndex parent;
+
+ GList *associations;
+
+ GHashTable *id_index;
+};
+
+struct _GstMemIndexClass
+{
+ GstIndexClass parent_class;
+};
+
+static void gst_mem_index_finalize (GObject * object);
+
+static void gst_mem_index_add_entry (GstIndex * index, GstIndexEntry * entry);
+static GstIndexEntry *gst_mem_index_get_assoc_entry (GstIndex * index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value, GCompareDataFunc func, gpointer user_data);
+
+#define CLASS(mem_index) GST_MEM_INDEX_CLASS (G_OBJECT_GET_CLASS (mem_index))
+
+static GType gst_mem_index_get_type (void);
+
+typedef GstMemIndex GstFlvDemuxMemIndex;
+typedef GstMemIndexClass GstFlvDemuxMemIndexClass;
+G_DEFINE_TYPE (GstFlvDemuxMemIndex, gst_mem_index, GST_TYPE_INDEX);
+
+static void
+gst_mem_index_class_init (GstMemIndexClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstIndexClass *gstindex_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstindex_class = (GstIndexClass *) klass;
+
+ gobject_class->finalize = gst_mem_index_finalize;
+
+ gstindex_class->add_entry = GST_DEBUG_FUNCPTR (gst_mem_index_add_entry);
+ gstindex_class->get_assoc_entry =
+ GST_DEBUG_FUNCPTR (gst_mem_index_get_assoc_entry);
+}
+
+static void
+gst_mem_index_init (GstMemIndex * index)
+{
+ GST_DEBUG ("created new mem index");
+
+ index->associations = NULL;
+ index->id_index = g_hash_table_new (g_int_hash, g_int_equal);
+}
+
+static void
+gst_mem_index_free_format (gpointer key, gpointer value, gpointer user_data)
+{
+ GstMemIndexFormatIndex *index = (GstMemIndexFormatIndex *) value;
+
+ if (index->tree) {
+ g_tree_destroy (index->tree);
+ }
+
+ g_slice_free (GstMemIndexFormatIndex, index);
+}
+
+static void
+gst_mem_index_free_id (gpointer key, gpointer value, gpointer user_data)
+{
+ GstMemIndexId *id_index = (GstMemIndexId *) value;
+
+ if (id_index->format_index) {
+ g_hash_table_foreach (id_index->format_index, gst_mem_index_free_format,
+ NULL);
+ g_hash_table_destroy (id_index->format_index);
+ id_index->format_index = NULL;
+ }
+
+ g_slice_free (GstMemIndexId, id_index);
+}
+
+static void
+gst_mem_index_finalize (GObject * object)
+{
+ GstMemIndex *memindex = GST_MEM_INDEX (object);
+
+ /* Delete the trees referencing the associations first */
+ if (memindex->id_index) {
+ g_hash_table_foreach (memindex->id_index, gst_mem_index_free_id, NULL);
+ g_hash_table_destroy (memindex->id_index);
+ memindex->id_index = NULL;
+ }
+
+ /* Then delete the associations themselves */
+ if (memindex->associations) {
+ g_list_foreach (memindex->associations, (GFunc) gst_index_entry_free, NULL);
+ g_list_free (memindex->associations);
+ memindex->associations = NULL;
+ }
+
+ G_OBJECT_CLASS (gst_mem_index_parent_class)->finalize (object);
+}
+
+static void
+gst_mem_index_add_id (GstIndex * index, GstIndexEntry * entry)
+{
+ GstMemIndex *memindex = GST_MEM_INDEX (index);
+ GstMemIndexId *id_index;
+
+ id_index = g_hash_table_lookup (memindex->id_index, &entry->id);
+
+ if (!id_index) {
+ id_index = g_slice_new0 (GstMemIndexId);
+
+ id_index->id = entry->id;
+ id_index->format_index = g_hash_table_new (g_int_hash, g_int_equal);
+ g_hash_table_insert (memindex->id_index, &id_index->id, id_index);
+ }
+}
+
+static gint
+mem_index_compare (gconstpointer a, gconstpointer b, gpointer user_data)
+{
+ GstMemIndexFormatIndex *index = user_data;
+ gint64 val1, val2;
+ gint64 diff;
+
+ val1 = GST_INDEX_ASSOC_VALUE (((GstIndexEntry *) a), index->offset);
+ val2 = GST_INDEX_ASSOC_VALUE (((GstIndexEntry *) b), index->offset);
+
+ diff = (val2 - val1);
+
+ return (diff == 0 ? 0 : (diff > 0 ? 1 : -1));
+}
+
+static void
+gst_mem_index_index_format (GstMemIndexId * id_index, GstIndexEntry * entry,
+ gint assoc)
+{
+ GstMemIndexFormatIndex *index;
+ GstFormat *format;
+
+ format = &GST_INDEX_ASSOC_FORMAT (entry, assoc);
+
+ index = g_hash_table_lookup (id_index->format_index, format);
+
+ if (!index) {
+ index = g_slice_new0 (GstMemIndexFormatIndex);
+
+ index->format = *format;
+ index->offset = assoc;
+ index->tree = g_tree_new_with_data (mem_index_compare, index);
+
+ g_hash_table_insert (id_index->format_index, &index->format, index);
+ }
+
+ g_tree_insert (index->tree, entry, entry);
+}
+
+static void
+gst_mem_index_add_association (GstIndex * index, GstIndexEntry * entry)
+{
+ GstMemIndex *memindex = GST_MEM_INDEX (index);
+ GstMemIndexId *id_index;
+
+ memindex->associations = g_list_prepend (memindex->associations, entry);
+
+ id_index = g_hash_table_lookup (memindex->id_index, &entry->id);
+ if (id_index) {
+ gint i;
+
+ for (i = 0; i < GST_INDEX_NASSOCS (entry); i++) {
+ gst_mem_index_index_format (id_index, entry, i);
+ }
+ }
+}
+
+static void
+gst_mem_index_add_object (GstIndex * index, GstIndexEntry * entry)
+{
+}
+
+static void
+gst_mem_index_add_format (GstIndex * index, GstIndexEntry * entry)
+{
+}
+
+static void
+gst_mem_index_add_entry (GstIndex * index, GstIndexEntry * entry)
+{
+ GST_LOG_OBJECT (index, "added this entry");
+
+ switch (entry->type) {
+ case GST_INDEX_ENTRY_ID:
+ gst_mem_index_add_id (index, entry);
+ break;
+ case GST_INDEX_ENTRY_ASSOCIATION:
+ gst_mem_index_add_association (index, entry);
+ break;
+ case GST_INDEX_ENTRY_OBJECT:
+ gst_mem_index_add_object (index, entry);
+ break;
+ case GST_INDEX_ENTRY_FORMAT:
+ gst_mem_index_add_format (index, entry);
+ break;
+ default:
+ break;
+ }
+}
+
+typedef struct
+{
+ gint64 value;
+ GstMemIndexFormatIndex *index;
+ gboolean exact;
+ GstIndexEntry *lower;
+ gint64 low_diff;
+ GstIndexEntry *higher;
+ gint64 high_diff;
+}
+GstMemIndexSearchData;
+
+static gint
+mem_index_search (gconstpointer a, gconstpointer b)
+{
+ GstMemIndexSearchData *data = (GstMemIndexSearchData *) b;
+ GstMemIndexFormatIndex *index = data->index;
+ gint64 val1, val2;
+ gint64 diff;
+
+ val1 = GST_INDEX_ASSOC_VALUE (((GstIndexEntry *) a), index->offset);
+ val2 = data->value;
+
+ diff = (val1 - val2);
+ if (diff == 0)
+ return 0;
+
+ /* exact matching, don't update low/high */
+ if (data->exact)
+ return (diff > 0 ? 1 : -1);
+
+ if (diff < 0) {
+ if (diff > data->low_diff) {
+ data->low_diff = diff;
+ data->lower = (GstIndexEntry *) a;
+ }
+ diff = -1;
+ } else {
+ if (diff < data->high_diff) {
+ data->high_diff = diff;
+ data->higher = (GstIndexEntry *) a;
+ }
+ diff = 1;
+ }
+
+ return diff;
+}
+
+static GstIndexEntry *
+gst_mem_index_get_assoc_entry (GstIndex * index, gint id,
+ GstIndexLookupMethod method,
+ GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value, GCompareDataFunc func, gpointer user_data)
+{
+ GstMemIndex *memindex = GST_MEM_INDEX (index);
+ GstMemIndexId *id_index;
+ GstMemIndexFormatIndex *format_index;
+ GstIndexEntry *entry;
+ GstMemIndexSearchData data;
+
+ id_index = g_hash_table_lookup (memindex->id_index, &id);
+ if (!id_index)
+ return NULL;
+
+ format_index = g_hash_table_lookup (id_index->format_index, &format);
+ if (!format_index)
+ return NULL;
+
+ data.value = value;
+ data.index = format_index;
+ data.exact = (method == GST_INDEX_LOOKUP_EXACT);
+
+ /* setup data for low/high checks if we are not looking
+ * for an exact match */
+ if (!data.exact) {
+ data.low_diff = G_MININT64;
+ data.lower = NULL;
+ data.high_diff = G_MAXINT64;
+ data.higher = NULL;
+ }
+
+ entry = g_tree_search (format_index->tree, mem_index_search, &data);
+
+ /* get the low/high values if we're not exact */
+ if (entry == NULL && !data.exact) {
+ if (method == GST_INDEX_LOOKUP_BEFORE)
+ entry = data.lower;
+ else if (method == GST_INDEX_LOOKUP_AFTER) {
+ entry = data.higher;
+ }
+ }
+
+ if (entry && ((GST_INDEX_ASSOC_FLAGS (entry) & flags) != flags)) {
+ if (method != GST_INDEX_LOOKUP_EXACT) {
+ GList *l_entry = g_list_find (memindex->associations, entry);
+
+ entry = NULL;
+
+ while (l_entry) {
+ entry = (GstIndexEntry *) l_entry->data;
+
+ if (entry->id == id && (GST_INDEX_ASSOC_FLAGS (entry) & flags) == flags)
+ break;
+
+ if (method == GST_INDEX_LOOKUP_BEFORE)
+ l_entry = g_list_next (l_entry);
+ else if (method == GST_INDEX_LOOKUP_AFTER) {
+ l_entry = g_list_previous (l_entry);
+ }
+ }
+ } else {
+ entry = NULL;
+ }
+ }
+
+ return entry;
+}
+
+#if 0
+gboolean
+gst_mem_index_plugin_init (GstPlugin * plugin)
+{
+ GstIndexFactory *factory;
+
+ factory = gst_index_factory_new ("memindex",
+ "A index that stores entries in memory", gst_mem_index_get_type ());
+
+ if (factory == NULL) {
+ g_warning ("failed to create memindex factory");
+ return FALSE;
+ }
+
+ GST_PLUGIN_FEATURE (factory)->plugin_name = plugin->desc.name;
+ GST_PLUGIN_FEATURE (factory)->loaded = TRUE;
+
+ gst_registry_add_feature (gst_registry_get_default (),
+ GST_PLUGIN_FEATURE (factory));
+
+ return TRUE;
+}
+#endif
"Codec/Decoder/Video",
"FLC/FLI/FLX video decoder",
"Sepp Wijnands <mrrazz@garbage-coderz.net>, Zeeshan Ali <zeenix@gmail.com>");
- gst_element_class_add_static_pad_template (gstelement_class,
- &sink_factory);
- gst_element_class_add_static_pad_template (gstelement_class,
- &src_video_factory);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_video_factory));
}
static void
/* gstgoom.c: implementation of goom drawing element
* Copyright (C) <2001> Richard Boulton <richard@tartarus.org>
* (C) <2006> Wim Taymans <wim at fluendo dot com>
+ * (C) <2011> Wim Taymans <wim.taymans at gmail dot com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v audiotestsrc ! goom ! ffmpegcolorspace ! xvimagesink
+ * gst-launch -v audiotestsrc ! goom ! videoconvert ! xvimagesink
* ]|
* </refsect2>
*/
#include <gst/gst.h>
#include "gstgoom.h"
#include <gst/video/video.h>
+#include <gst/audio/audio.h>
#include "goom.h"
#if HAVE_ORC
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_xRGB_HOST_ENDIAN)
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("xRGB"))
+#else
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("BGRx"))
+#endif
);
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", /* the name of the pads */
GST_PAD_SINK, /* type of the pad */
GST_PAD_ALWAYS, /* ALWAYS/SOMETIMES */
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) BYTE_ORDER, "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 8000, 96000 ], " "channels = (int) { 1, 2 }")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "rate = (int) [ 8000, 96000 ], "
+ "channels = (int) 1, "
+ "layout = (string) interleaved; "
+ "audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "rate = (int) [ 8000, 96000 ], "
+ "channels = (int) 2, "
+ "channel-mask = (bitmask) 0x3, " "layout = (string) interleaved")
);
-static void gst_goom_class_init (GstGoomClass * klass);
-static void gst_goom_base_init (GstGoomClass * klass);
-static void gst_goom_init (GstGoom * goom);
static void gst_goom_finalize (GObject * object);
static GstStateChangeReturn gst_goom_change_state (GstElement * element,
GstStateChange transition);
-static GstFlowReturn gst_goom_chain (GstPad * pad, GstBuffer * buffer);
-static gboolean gst_goom_src_event (GstPad * pad, GstEvent * event);
-static gboolean gst_goom_sink_event (GstPad * pad, GstEvent * event);
-
-static gboolean gst_goom_src_query (GstPad * pad, GstQuery * query);
+static GstFlowReturn gst_goom_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static gboolean gst_goom_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_goom_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
-static gboolean gst_goom_sink_setcaps (GstPad * pad, GstCaps * caps);
-static gboolean gst_goom_src_setcaps (GstPad * pad, GstCaps * caps);
+static gboolean gst_goom_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
-static GstElementClass *parent_class = NULL;
-
-GType
-gst_goom_get_type (void)
-{
- static GType type = 0;
-
- if (!type) {
- static const GTypeInfo info = {
- sizeof (GstGoomClass),
- (GBaseInitFunc) gst_goom_base_init,
- NULL,
- (GClassInitFunc) gst_goom_class_init,
- NULL,
- NULL,
- sizeof (GstGoom),
- 0,
- (GInstanceInitFunc) gst_goom_init,
- };
-
- type = g_type_register_static (GST_TYPE_ELEMENT, "GstGoom", &info, 0);
- }
- return type;
-}
-
-static void
-gst_goom_base_init (GstGoomClass * klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (element_class, "GOOM: what a GOOM!",
- "Visualization",
- "Takes frames of data and outputs video frames using the GOOM filter",
- "Wim Taymans <wim@fluendo.com>");
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
-}
+#define gst_goom_parent_class parent_class
+G_DEFINE_TYPE (GstGoom, gst_goom, GST_TYPE_ELEMENT);
static void
gst_goom_class_init (GstGoomClass * klass)
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
-
gobject_class->finalize = gst_goom_finalize;
+ gst_element_class_set_details_simple (gstelement_class, "GOOM: what a GOOM!",
+ "Visualization",
+ "Takes frames of data and outputs video frames using the GOOM filter",
+ "Wim Taymans <wim@fluendo.com>");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_goom_change_state);
}
GST_DEBUG_FUNCPTR (gst_goom_chain));
gst_pad_set_event_function (goom->sinkpad,
GST_DEBUG_FUNCPTR (gst_goom_sink_event));
- gst_pad_set_setcaps_function (goom->sinkpad,
- GST_DEBUG_FUNCPTR (gst_goom_sink_setcaps));
gst_element_add_pad (GST_ELEMENT (goom), goom->sinkpad);
goom->srcpad = gst_pad_new_from_static_template (&src_template, "src");
- gst_pad_set_setcaps_function (goom->srcpad,
- GST_DEBUG_FUNCPTR (gst_goom_src_setcaps));
gst_pad_set_event_function (goom->srcpad,
GST_DEBUG_FUNCPTR (gst_goom_src_event));
gst_pad_set_query_function (goom->srcpad,
goom->plugin = NULL;
g_object_unref (goom->adapter);
+ if (goom->pool)
+ gst_object_unref (goom->pool);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
}
static gboolean
-gst_goom_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_goom_sink_setcaps (GstGoom * goom, GstCaps * caps)
{
- GstGoom *goom;
GstStructure *structure;
- gboolean res;
-
- goom = GST_GOOM (GST_PAD_PARENT (pad));
structure = gst_caps_get_structure (caps, 0);
- res = gst_structure_get_int (structure, "channels", &goom->channels);
- res &= gst_structure_get_int (structure, "rate", &goom->rate);
+ gst_structure_get_int (structure, "channels", &goom->channels);
+ gst_structure_get_int (structure, "rate", &goom->rate);
goom->bps = goom->channels * sizeof (gint16);
- return res;
+ return TRUE;
}
static gboolean
-gst_goom_src_setcaps (GstPad * pad, GstCaps * caps)
+gst_goom_src_setcaps (GstGoom * goom, GstCaps * caps)
{
- GstGoom *goom;
GstStructure *structure;
-
- goom = GST_GOOM (GST_PAD_PARENT (pad));
+ gboolean res;
structure = gst_caps_get_structure (caps, 0);
-
if (!gst_structure_get_int (structure, "width", &goom->width) ||
!gst_structure_get_int (structure, "height", &goom->height) ||
!gst_structure_get_fraction (structure, "framerate", &goom->fps_n,
&goom->fps_d))
- return FALSE;
+ goto error;
goom_set_resolution (goom->plugin, goom->width, goom->height);
GST_DEBUG_OBJECT (goom, "dimension %dx%d, framerate %d/%d, spf %d",
goom->width, goom->height, goom->fps_n, goom->fps_d, goom->spf);
- return TRUE;
+ res = gst_pad_push_event (goom->srcpad, gst_event_new_caps (caps));
+
+ return res;
+
+ /* ERRORS */
+error:
+ {
+ GST_DEBUG_OBJECT (goom, "error parsing caps");
+ return FALSE;
+ }
}
static gboolean
{
GstCaps *othercaps, *target;
GstStructure *structure;
- const GstCaps *templ;
+ GstCaps *templ;
+ GstQuery *query;
+ GstBufferPool *pool = NULL;
+ guint size, min, max, prefix, alignment;
templ = gst_pad_get_pad_template_caps (goom->srcpad);
GST_DEBUG_OBJECT (goom, "performing negotiation");
/* see what the peer can do */
- othercaps = gst_pad_peer_get_caps (goom->srcpad);
+ othercaps = gst_pad_peer_query_caps (goom->srcpad, NULL);
if (othercaps) {
target = gst_caps_intersect (othercaps, templ);
gst_caps_unref (othercaps);
+ gst_caps_unref (templ);
if (gst_caps_is_empty (target))
goto no_format;
gst_caps_truncate (target);
} else {
- target = gst_caps_ref ((GstCaps *) templ);
+ target = templ;
}
structure = gst_caps_get_structure (target, 0);
gst_structure_fixate_field_nearest_fraction (structure, "framerate",
DEFAULT_FPS_N, DEFAULT_FPS_D);
- gst_pad_set_caps (goom->srcpad, target);
+ gst_goom_src_setcaps (goom, target);
+
+ /* try to get a bufferpool now */
+ /* find a pool for the negotiated caps now */
+ query = gst_query_new_allocation (target, TRUE);
+
+ if (gst_pad_peer_query (goom->srcpad, query)) {
+ /* we got configuration from our peer, parse them */
+ gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
+ &alignment, &pool);
+ } else {
+ size = goom->outsize;
+ min = max = 0;
+ prefix = 0;
+ alignment = 0;
+ }
+
+ if (pool == NULL) {
+ GstStructure *config;
+
+ /* we did not get a pool, make one ourselves then */
+ pool = gst_buffer_pool_new ();
+
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set (config, target, size, min, max, prefix,
+ alignment);
+ gst_buffer_pool_set_config (pool, config);
+ }
+
+ if (goom->pool)
+ gst_object_unref (goom->pool);
+ goom->pool = pool;
+
+ /* and activate */
+ gst_buffer_pool_set_active (pool, TRUE);
+
gst_caps_unref (target);
return TRUE;
}
static gboolean
-gst_goom_src_event (GstPad * pad, GstEvent * event)
+gst_goom_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
gboolean res;
GstGoom *goom;
- goom = GST_GOOM (gst_pad_get_parent (pad));
+ goom = GST_GOOM (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_QOS:
GstClockTimeDiff diff;
GstClockTime timestamp;
- gst_event_parse_qos (event, &proportion, &diff, ×tamp);
+ gst_event_parse_qos (event, NULL, &proportion, &diff, ×tamp);
/* save stuff for the _chain() function */
GST_OBJECT_LOCK (goom);
res = gst_pad_push_event (goom->sinkpad, event);
break;
}
- gst_object_unref (goom);
return res;
}
static gboolean
-gst_goom_sink_event (GstPad * pad, GstEvent * event)
+gst_goom_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
gboolean res;
GstGoom *goom;
- goom = GST_GOOM (gst_pad_get_parent (pad));
+ goom = GST_GOOM (parent);
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ res = gst_goom_sink_setcaps (goom, caps);
+ break;
+ }
case GST_EVENT_FLUSH_START:
res = gst_pad_push_event (goom->srcpad, event);
break;
gst_goom_reset (goom);
res = gst_pad_push_event (goom->srcpad, event);
break;
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time;
- gboolean update;
-
/* the newsegment values are used to clip the input samples
* and to convert the incomming timestamps to running time so
* we can do QoS */
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
-
- /* now configure the values */
- gst_segment_set_newsegment_full (&goom->segment, update,
- rate, arate, format, start, stop, time);
+ gst_event_copy_segment (event, &goom->segment);
res = gst_pad_push_event (goom->srcpad, event);
break;
res = gst_pad_push_event (goom->srcpad, event);
break;
}
- gst_object_unref (goom);
return res;
}
static gboolean
-gst_goom_src_query (GstPad * pad, GstQuery * query)
+gst_goom_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
gboolean res = FALSE;
GstGoom *goom;
- goom = GST_GOOM (gst_pad_get_parent (pad));
+ goom = GST_GOOM (parent);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_LATENCY:
break;
}
default:
- res = gst_pad_peer_query (goom->sinkpad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
- gst_object_unref (goom);
-
return res;
}
+/* make sure we are negotiated */
static GstFlowReturn
-get_buffer (GstGoom * goom, GstBuffer ** outbuf)
+ensure_negotiated (GstGoom * goom)
{
- GstFlowReturn ret;
+ gboolean reconfigure;
- if (GST_PAD_CAPS (goom->srcpad) == NULL) {
+ reconfigure = gst_pad_check_reconfigure (goom->srcpad);
+
+ /* we don't know an output format yet, pick one */
+ if (reconfigure || !gst_pad_has_current_caps (goom->srcpad)) {
if (!gst_goom_src_negotiate (goom))
return GST_FLOW_NOT_NEGOTIATED;
}
-
- GST_DEBUG_OBJECT (goom, "allocating output buffer with caps %"
- GST_PTR_FORMAT, GST_PAD_CAPS (goom->srcpad));
-
- ret =
- gst_pad_alloc_buffer_and_set_caps (goom->srcpad,
- GST_BUFFER_OFFSET_NONE, goom->outsize,
- GST_PAD_CAPS (goom->srcpad), outbuf);
- if (ret != GST_FLOW_OK)
- return ret;
-
return GST_FLOW_OK;
}
+
static GstFlowReturn
-gst_goom_chain (GstPad * pad, GstBuffer * buffer)
+gst_goom_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstGoom *goom;
GstFlowReturn ret;
GstBuffer *outbuf = NULL;
- goom = GST_GOOM (gst_pad_get_parent (pad));
+ goom = GST_GOOM (parent);
if (goom->bps == 0) {
ret = GST_FLOW_NOT_NEGOTIATED;
goto beach;
}
- /* If we don't have an output format yet, preallocate a buffer to try and
- * set one */
- if (GST_PAD_CAPS (goom->srcpad) == NULL) {
- ret = get_buffer (goom, &outbuf);
- if (ret != GST_FLOW_OK) {
- gst_buffer_unref (buffer);
- goto beach;
- }
+ /* Make sure have an output format */
+ ret = ensure_negotiated (goom);
+ if (ret != GST_FLOW_OK) {
+ gst_buffer_unref (buffer);
+ goto beach;
}
/* don't try to combine samples from discont buffer */
}
GST_DEBUG_OBJECT (goom,
- "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (buffer) / goom->bps, GST_BUFFER_TIMESTAMP (buffer));
+ "Input buffer has %" G_GSIZE_FORMAT " samples, time=%" G_GUINT64_FORMAT,
+ gst_buffer_get_size (buffer) / goom->bps, GST_BUFFER_TIMESTAMP (buffer));
/* Collect samples until we have enough for an output frame */
gst_adapter_push (goom->adapter, buffer);
while (TRUE) {
const guint16 *data;
- gboolean need_skip;
guchar *out_frame;
gint i;
guint avail, to_flush;
timestamp += gst_util_uint64_scale_int (dist, GST_SECOND, goom->rate);
}
- if (timestamp != -1) {
+ if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
gint64 qostime;
+ gboolean need_skip;
qostime = gst_segment_to_running_time (&goom->segment, GST_FORMAT_TIME,
- timestamp);
- qostime += goom->duration;
+ timestamp) + goom->duration;
GST_OBJECT_LOCK (goom);
/* check for QoS, don't compute buffers that are known to be late */
/* get next GOOM_SAMPLES, we have at least this amount of samples */
data =
- (const guint16 *) gst_adapter_peek (goom->adapter,
+ (const guint16 *) gst_adapter_map (goom->adapter,
GOOM_SAMPLES * goom->bps);
if (goom->channels == 2) {
/* alloc a buffer if we don't have one yet, this happens
* when we pushed a buffer in this while loop before */
if (outbuf == NULL) {
- ret = get_buffer (goom, &outbuf);
+ GST_DEBUG_OBJECT (goom, "allocating output buffer");
+ ret = gst_buffer_pool_acquire_buffer (goom->pool, &outbuf, NULL);
if (ret != GST_FLOW_OK) {
+ gst_adapter_unmap (goom->adapter);
goto beach;
}
}
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_DURATION (outbuf) = goom->duration;
- GST_BUFFER_SIZE (outbuf) = goom->outsize;
out_frame = (guchar *) goom_update (goom->plugin, goom->datain, 0, 0);
- memcpy (GST_BUFFER_DATA (outbuf), out_frame, goom->outsize);
+ gst_buffer_fill (outbuf, 0, out_frame, goom->outsize);
+
+ gst_adapter_unmap (goom->adapter);
GST_DEBUG ("Pushing frame with time=%" GST_TIME_FORMAT ", duration=%"
GST_TIME_FORMAT, GST_TIME_ARGS (timestamp),
gst_buffer_unref (outbuf);
beach:
- gst_object_unref (goom);
return ret;
}
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
+ if (goom->pool) {
+ gst_buffer_pool_set_active (goom->pool, FALSE);
+ gst_object_replace ((GstObject **) & goom->pool, NULL);
+ }
break;
case GST_STATE_CHANGE_READY_TO_NULL:
break;
gint height;
GstClockTime duration;
guint outsize;
+ GstBufferPool *pool;
/* samples per frame */
guint spf;
"GOOM: what a GOOM! 2k1 edition", "Visualization",
"Takes frames of data and outputs video frames using the GOOM 2k1 filter",
"Wim Taymans <wim@fluendo.com>");
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
}
static void
GST_STATIC_CAPS ("ANY")
);
-static void gst_icydemux_class_init (GstICYDemuxClass * klass);
-static void gst_icydemux_base_init (GstICYDemuxClass * klass);
-static void gst_icydemux_init (GstICYDemux * icydemux);
static void gst_icydemux_dispose (GObject * object);
-static GstFlowReturn gst_icydemux_chain (GstPad * pad, GstBuffer * buf);
-static gboolean gst_icydemux_handle_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_icydemux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_icydemux_handle_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static gboolean gst_icydemux_add_srcpad (GstICYDemux * icydemux,
GstCaps * new_caps);
static gboolean gst_icydemux_send_tag_event (GstICYDemux * icydemux,
GstTagList * taglist);
-static GstElementClass *parent_class = NULL;
-GType
-gst_icydemux_get_type (void)
-{
- static GType plugin_type = 0;
-
- if (!plugin_type) {
- static const GTypeInfo plugin_info = {
- sizeof (GstICYDemuxClass),
- (GBaseInitFunc) gst_icydemux_base_init,
- NULL,
- (GClassInitFunc) gst_icydemux_class_init,
- NULL,
- NULL,
- sizeof (GstICYDemux),
- 0,
- (GInstanceInitFunc) gst_icydemux_init,
- };
- plugin_type = g_type_register_static (GST_TYPE_ELEMENT,
- "GstICYDemux", &plugin_info, 0);
- }
- return plugin_type;
-}
-
-static void
-gst_icydemux_base_init (GstICYDemuxClass * klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
- gst_element_class_set_details_simple (element_class, "ICY tag demuxer",
- "Codec/Demuxer/Metadata",
- "Read and output ICY tags while demuxing the contents",
- "Jan Schmidt <thaytan@mad.scientist.com>, "
- "Michael Smith <msmith@fluendo.com>");
-}
+#define gst_icydemux_parent_class parent_class
+G_DEFINE_TYPE (GstICYDemux, gst_icydemux, GST_TYPE_ELEMENT);
static void
gst_icydemux_class_init (GstICYDemuxClass * klass)
gstelement_class->change_state = gst_icydemux_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "ICY tag demuxer",
+ "Codec/Demuxer/Metadata",
+ "Read and output ICY tags while demuxing the contents",
+ "Jan Schmidt <thaytan@mad.scientist.com>, "
+ "Michael Smith <msmith@fluendo.com>");
}
static void
GST_DEBUG_FUNCPTR (gst_icydemux_chain));
gst_pad_set_event_function (icydemux->sinkpad,
GST_DEBUG_FUNCPTR (gst_icydemux_handle_event));
- gst_pad_set_setcaps_function (icydemux->sinkpad,
- GST_DEBUG_FUNCPTR (gst_icydemux_sink_setcaps));
gst_element_add_pad (GST_ELEMENT (icydemux), icydemux->sinkpad);
gst_icydemux_reset (icydemux);
length = gst_adapter_available (icydemux->meta_adapter);
- data = gst_adapter_peek (icydemux->meta_adapter, length);
+ data = gst_adapter_map (icydemux->meta_adapter, length);
/* Now, copy this to a buffer where we can NULL-terminate it to make things
* a bit easier, then do that parsing. */
buffer = g_strndup ((const gchar *) data, length);
- tags = gst_tag_list_new ();
+ tags = gst_tag_list_new_empty ();
strings = g_strsplit (buffer, "';", 0);
for (i = 0; strings[i]; i++) {
g_strfreev (strings);
g_free (buffer);
- gst_adapter_clear (icydemux->meta_adapter);
+ gst_adapter_unmap (icydemux->meta_adapter);
+ gst_adapter_flush (icydemux->meta_adapter, length);
if (!gst_tag_list_is_empty (tags))
gst_icydemux_tag_found (icydemux, tags);
}
static gboolean
-gst_icydemux_handle_event (GstPad * pad, GstEvent * event)
+gst_icydemux_handle_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
- GstICYDemux *icydemux = GST_ICYDEMUX (GST_PAD_PARENT (pad));
+ GstICYDemux *icydemux = GST_ICYDEMUX (parent);
gboolean result;
- if (GST_EVENT_TYPE (event) == GST_EVENT_TAG) {
- GstTagList *tags;
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_TAG:
+ {
+ GstTagList *tags;
- gst_event_parse_tag (event, &tags);
- result = gst_icydemux_tag_found (icydemux, gst_tag_list_copy (tags));
- gst_event_unref (event);
- return result;
+ gst_event_parse_tag (event, &tags);
+ result = gst_icydemux_tag_found (icydemux, gst_tag_list_copy (tags));
+ gst_event_unref (event);
+ return result;
+ }
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ result = gst_icydemux_sink_setcaps (pad, caps);
+ gst_event_unref (event);
+ return result;
+ }
+ default:
+ break;
}
if (icydemux->typefinding) {
g_list_free (icydemux->cached_events);
icydemux->cached_events = NULL;
- return gst_pad_event_default (pad, event);
+ return gst_pad_event_default (pad, parent, event);
default:
icydemux->cached_events = g_list_append (icydemux->cached_events,
event);
return TRUE;
}
} else {
- return gst_pad_event_default (pad, event);
+ return gst_pad_event_default (pad, parent, event);
}
}
if (G_UNLIKELY (icydemux->content_type)) {
if (!g_ascii_strcasecmp (icydemux->content_type, "video/nsv")) {
GST_DEBUG ("We have a NSV stream");
- caps = gst_caps_new_simple ("video/x-nsv", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-nsv");
} else {
GST_DEBUG ("Upstream Content-Type isn't supported");
g_free (icydemux->content_type);
icydemux->typefind_buf, &prob);
if (caps == NULL) {
- if (GST_BUFFER_SIZE (icydemux->typefind_buf) < ICY_TYPE_FIND_MAX_SIZE) {
+ if (gst_buffer_get_size (icydemux->typefind_buf) <
+ ICY_TYPE_FIND_MAX_SIZE) {
/* Just break for more data */
return GST_FLOW_OK;
}
return GST_FLOW_ERROR;
}
- buf = gst_buffer_make_metadata_writable (buf);
- gst_buffer_set_caps (buf, icydemux->src_caps);
+ buf = gst_buffer_make_writable (buf);
/* Most things don't care, and it's a pain to track (we should preserve a
* 0 offset on the first buffer though if it's there, for id3demux etc.) */
}
static GstFlowReturn
-gst_icydemux_chain (GstPad * pad, GstBuffer * buf)
+gst_icydemux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstICYDemux *icydemux;
guint size, chunk, offset;
GstBuffer *sub;
GstFlowReturn ret = GST_FLOW_OK;
- icydemux = GST_ICYDEMUX (GST_PAD_PARENT (pad));
+ icydemux = GST_ICYDEMUX (parent);
if (G_UNLIKELY (icydemux->meta_interval < 0))
goto not_negotiated;
/* Go through the buffer, chopping it into appropriate chunks. Forward as
* tags or buffers, as appropriate
*/
- size = GST_BUFFER_SIZE (buf);
+ size = gst_buffer_get_size (buf);
offset = 0;
while (size) {
if (icydemux->remaining) {
chunk = (size <= icydemux->remaining) ? size : icydemux->remaining;
- sub = gst_buffer_create_sub (buf, offset, chunk);
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, chunk);
offset += chunk;
icydemux->remaining -= chunk;
size -= chunk;
} else if (icydemux->meta_remaining) {
chunk = (size <= icydemux->meta_remaining) ?
size : icydemux->meta_remaining;
- sub = gst_buffer_create_sub (buf, offset, chunk);
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, chunk);
gst_icydemux_add_meta (icydemux, sub);
offset += chunk;
icydemux->remaining = icydemux->meta_interval;
}
} else {
+ guint8 byte;
/* We need to read a single byte (always safe at this point in the loop)
* to figure out how many bytes of metadata exist.
* The 'spec' tells us to read 16 * (byte_value) bytes of metadata after
* this (zero is common, and means the metadata hasn't changed).
*/
- icydemux->meta_remaining = 16 * GST_BUFFER_DATA (buf)[offset];
+ gst_buffer_extract (buf, offset, &byte, 1);
+ icydemux->meta_remaining = 16 * byte;
if (icydemux->meta_remaining == 0)
icydemux->remaining = icydemux->meta_interval;
static void gst_id3demux_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstID3Demux, gst_id3demux, GstTagDemux, GST_TYPE_TAG_DEMUX);
-
-static void
-gst_id3demux_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
-
- gst_element_class_set_details_simple (element_class, "ID3 tag demuxer",
- "Codec/Demuxer/Metadata",
- "Read and output ID3v1 and ID3v2 tags while demuxing the contents",
- "Jan Schmidt <thaytan@mad.scientist.com>");
-}
+#define gst_id3demux_parent_class parent_class
+G_DEFINE_TYPE (GstID3Demux, gst_id3demux, GST_TYPE_TAG_DEMUX);
static void
gst_id3demux_class_init (GstID3DemuxClass * klass)
{
- GstTagDemuxClass *tagdemux_class = (GstTagDemuxClass *) klass;
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstTagDemuxClass *tagdemux_class = (GstTagDemuxClass *) klass;
gobject_class->set_property = gst_id3demux_set_property;
gobject_class->get_property = gst_id3demux_get_property;
"and ID3v2 tags are present", DEFAULT_PREFER_V1,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "ID3 tag demuxer",
+ "Codec/Demuxer/Metadata",
+ "Read and output ID3v1 and ID3v2 tags while demuxing the contents",
+ "Jan Schmidt <thaytan@mad.scientist.com>");
+
tagdemux_class->identify_tag = GST_DEBUG_FUNCPTR (gst_id3demux_identify_tag);
tagdemux_class->parse_tag = GST_DEBUG_FUNCPTR (gst_id3demux_parse_tag);
tagdemux_class->merge_tags = GST_DEBUG_FUNCPTR (gst_id3demux_merge_tags);
}
static void
-gst_id3demux_init (GstID3Demux * id3demux, GstID3DemuxClass * klass)
+gst_id3demux_init (GstID3Demux * id3demux)
{
id3demux->prefer_v1 = DEFAULT_PREFER_V1;
}
gst_id3demux_identify_tag (GstTagDemux * demux, GstBuffer * buf,
gboolean start_tag, guint * tag_size)
{
- const guint8 *data = GST_BUFFER_DATA (buf);
+ guint8 data[3];
+
+ gst_buffer_extract (buf, 0, data, 3);
if (start_tag) {
if (data[0] != 'I' || data[1] != 'D' || data[2] != '3')
return GST_TAG_DEMUX_RESULT_BROKEN_TAG;
}
} else {
- *tags = gst_tag_list_new_from_id3v1 (GST_BUFFER_DATA (buffer));
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ *tags = gst_tag_list_new_from_id3v1 (map.data);
+ gst_buffer_unmap (buffer, &map);
if (G_UNLIKELY (*tags == NULL))
return GST_TAG_DEMUX_RESULT_BROKEN_TAG;
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <gst/glib-compat-private.h>
#include "gstimagefreeze.h"
"Generates a still frame stream from an image",
"Sebastian Dröge <sebastian.droege@collabora.co.uk>");
- gst_element_class_add_static_pad_template (gstelement_class,
- &sink_pad_template);
- gst_element_class_add_static_pad_template (gstelement_class,
- &src_pad_template);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_pad_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_pad_template));
}
static void
GST_PAD_STREAM_LOCK (self->srcpad);
ret = gst_pad_alloc_buffer (self->srcpad, offset, size, caps, buf);
- seeking = ret == GST_FLOW_WRONG_STATE
- && g_atomic_int_get (&self->seeking);
+ seeking = ret == GST_FLOW_FLUSHING && g_atomic_int_get (&self->seeking);
GST_PAD_STREAM_UNLOCK (self->srcpad);
} while (seeking);
gst_flow_get_name (ret));
} else {
/* Let upstream go EOS if we already have a buffer */
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
}
gst_object_unref (self);
GST_DEBUG_OBJECT (pad, "Already have a buffer, dropping");
gst_buffer_unref (buffer);
g_mutex_unlock (self->lock);
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
self->buffer = buffer;
plugin_LTLIBRARIES = libgstinterleave.la
-libgstinterleave_la_SOURCES = plugin.c interleave.c deinterleave.c
+libgstinterleave_la_SOURCES = plugin.c deinterleave.c
libgstinterleave_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstinterleave_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(GST_LIBS)
libgstinterleave_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstinterleave_la_LIBTOOLFLAGS = --tag=disable-static
-noinst_HEADERS = plugin.h interleave.h deinterleave.h
+noinst_HEADERS = plugin.h deinterleave.h
Android.mk: Makefile.am $(BUILT_SOURCES)
androgenizer \
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch filesrc location=/path/to/file.mp3 ! decodebin ! audioconvert ! "audio/x-raw-int,channels=2 ! deinterleave name=d d.src0 ! queue ! audioconvert ! vorbisenc ! oggmux ! filesink location=channel1.ogg d.src1 ! queue ! audioconvert ! vorbisenc ! oggmux ! filesink location=channel2.ogg
+ * gst-launch filesrc location=/path/to/file.mp3 ! decodebin ! audioconvert ! "audio/x-raw,channels=2 ! deinterleave name=d d.src_0 ! queue ! audioconvert ! vorbisenc ! oggmux ! filesink location=channel1.ogg d.src_1 ! queue ! audioconvert ! vorbisenc ! oggmux ! filesink location=channel2.ogg
* ]| Decodes an MP3 file and encodes the left and right channel into separate
* Ogg Vorbis files.
* |[
- * gst-launch filesrc location=file.mp3 ! decodebin ! audioconvert ! "audio/x-raw-int,channels=2" ! deinterleave name=d interleave name=i ! audioconvert ! wavenc ! filesink location=test.wav d.src0 ! queue ! audioconvert ! i.sink1 d.src1 ! queue ! audioconvert ! i.sink0
+ * gst-launch filesrc location=file.mp3 ! decodebin ! audioconvert ! "audio/x-raw,channels=2" ! deinterleave name=d interleave name=i ! audioconvert ! wavenc ! filesink location=test.wav d.src_0 ! queue ! audioconvert ! i.sink_1 d.src_1 ! queue ! audioconvert ! i.sink_0
* ]| Decodes and deinterleaves a Stereo MP3 file into separate channels and
* then interleaves the channels again to a WAV file with the channel with the
* channels exchanged.
GST_DEBUG_CATEGORY_STATIC (gst_deinterleave_debug);
#define GST_CAT_DEFAULT gst_deinterleave_debug
-static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src%d",
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
- GST_STATIC_CAPS ("audio/x-raw-int, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_FORMATS_ALL ", "
"rate = (int) [ 1, MAX ], "
- "channels = (int) 1, "
- "endianness = (int) { LITTLE_ENDIAN, BIG_ENDIAN }, "
- "width = (int) { 8, 16, 24, 32 }, "
- "depth = (int) [ 1, 32 ], "
- "signed = (boolean) { true, false }; "
- "audio/x-raw-float, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) 1, "
- "endianness = (int) { LITTLE_ENDIAN , BIG_ENDIAN }, "
- "width = (int) { 32, 64 }")
- );
+ "channels = (int) 1, layout = (string) {non-interleaved, interleaved}"));
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, MAX ], "
- "endianness = (int) { LITTLE_ENDIAN, BIG_ENDIAN }, "
- "width = (int) { 8, 16, 24, 32 }, "
- "depth = (int) [ 1, 32 ], "
- "signed = (boolean) { true, false }; "
- "audio/x-raw-float, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_FORMATS_ALL ", "
"rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, MAX ], "
- "endianness = (int) { LITTLE_ENDIAN , BIG_ENDIAN }, "
- "width = (int) { 32, 64 }")
- );
+ "channels = (int) [ 1, MAX ], layout = (string) interleaved"));
#define MAKE_FUNC(type) \
static void deinterleave_##type (guint##type *out, guint##type *in, \
}
}
-GST_BOILERPLATE (GstDeinterleave, gst_deinterleave, GstElement,
- GST_TYPE_ELEMENT);
+#define gst_deinterleave_parent_class parent_class
+G_DEFINE_TYPE (GstDeinterleave, gst_deinterleave, GST_TYPE_ELEMENT);
enum
{
PROP_KEEP_POSITIONS
};
-static GstFlowReturn gst_deinterleave_chain (GstPad * pad, GstBuffer * buffer);
+static GstFlowReturn gst_deinterleave_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
-static gboolean gst_deinterleave_sink_setcaps (GstPad * pad, GstCaps * caps);
+static gboolean gst_deinterleave_sink_setcaps (GstDeinterleave * self,
+ GstCaps * caps);
-static GstCaps *gst_deinterleave_sink_getcaps (GstPad * pad);
+static GstStateChangeReturn
+gst_deinterleave_change_state (GstElement * element, GstStateChange transition);
-static gboolean gst_deinterleave_sink_activate_push (GstPad * pad,
- gboolean active);
-static gboolean gst_deinterleave_sink_event (GstPad * pad, GstEvent * event);
+static gboolean gst_deinterleave_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
-static gboolean gst_deinterleave_src_query (GstPad * pad, GstQuery * query);
+static gboolean gst_deinterleave_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
static void gst_deinterleave_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
{
GstDeinterleave *self = GST_DEINTERLEAVE (obj);
- if (self->pos) {
- g_free (self->pos);
- self->pos = NULL;
- }
-
if (self->pending_events) {
g_list_foreach (self->pending_events, (GFunc) gst_mini_object_unref, NULL);
g_list_free (self->pending_events);
}
static void
-gst_deinterleave_base_init (gpointer g_class)
+gst_deinterleave_class_init (GstDeinterleaveClass * klass)
{
- GstElementClass *gstelement_class = (GstElementClass *) g_class;
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_deinterleave_debug, "deinterleave", 0,
+ "deinterleave element");
gst_element_class_set_details_simple (gstelement_class, "Audio deinterleaver",
"Filter/Converter/Audio",
"Iain <iain@prettypeople.org>, "
"Sebastian Dröge <slomo@circular-chaos.org>");
- gst_element_class_add_static_pad_template (gstelement_class,
- &sink_template);
- gst_element_class_add_static_pad_template (gstelement_class,
- &src_template);
-}
-
-static void
-gst_deinterleave_class_init (GstDeinterleaveClass * klass)
-{
- GObjectClass *gobject_class = (GObjectClass *) klass;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
- GST_DEBUG_CATEGORY_INIT (gst_deinterleave_debug, "deinterleave", 0,
- "deinterleave element");
+ gstelement_class->change_state = gst_deinterleave_change_state;
gobject_class->finalize = gst_deinterleave_finalize;
gobject_class->set_property = gst_deinterleave_set_property;
}
static void
-gst_deinterleave_init (GstDeinterleave * self, GstDeinterleaveClass * klass)
+gst_deinterleave_init (GstDeinterleave * self)
{
- self->channels = 0;
- self->pos = NULL;
self->keep_positions = FALSE;
- self->width = 0;
self->func = NULL;
+ gst_audio_info_init (&self->audio_info);
/* Add sink pad */
self->sink = gst_pad_new_from_static_template (&sink_template, "sink");
gst_pad_set_chain_function (self->sink,
GST_DEBUG_FUNCPTR (gst_deinterleave_chain));
- gst_pad_set_setcaps_function (self->sink,
- GST_DEBUG_FUNCPTR (gst_deinterleave_sink_setcaps));
- gst_pad_set_getcaps_function (self->sink,
- GST_DEBUG_FUNCPTR (gst_deinterleave_sink_getcaps));
- gst_pad_set_activatepush_function (self->sink,
- GST_DEBUG_FUNCPTR (gst_deinterleave_sink_activate_push));
gst_pad_set_event_function (self->sink,
GST_DEBUG_FUNCPTR (gst_deinterleave_sink_event));
gst_element_add_pad (GST_ELEMENT (self), self->sink);
guint i;
- for (i = 0; i < self->channels; i++) {
- gchar *name = g_strdup_printf ("src%d", i);
+ for (i = 0; i < GST_AUDIO_INFO_CHANNELS (&self->audio_info); i++) {
+ gchar *name = g_strdup_printf ("src_%u", i);
GstCaps *srccaps;
+ GstAudioInfo info;
+ GstAudioFormat format = GST_AUDIO_INFO_FORMAT (&self->audio_info);
+ gint rate = GST_AUDIO_INFO_RATE (&self->audio_info);
+ GstAudioChannelPosition position = 0;
- GstStructure *s;
+ /* Set channel position if we know it */
+ if (self->keep_positions)
+ position = GST_AUDIO_INFO_POSITION (&self->audio_info, i);
- pad = gst_pad_new_from_static_template (&src_template, name);
- g_free (name);
+ gst_audio_info_init (&info);
+ gst_audio_info_set_format (&info, format, rate, 1, &position);
- /* Set channel position if we know it */
- if (self->keep_positions) {
- GstAudioChannelPosition pos[1] = { GST_AUDIO_CHANNEL_POSITION_NONE };
+ srccaps = gst_audio_info_to_caps (&info);
- srccaps = gst_caps_copy (caps);
- s = gst_caps_get_structure (srccaps, 0);
- if (self->pos)
- gst_audio_set_channel_positions (s, &self->pos[i]);
- else
- gst_audio_set_channel_positions (s, pos);
- } else {
- srccaps = caps;
- }
+ pad = gst_pad_new_from_static_template (&src_template, name);
+ g_free (name);
- gst_pad_set_caps (pad, srccaps);
gst_pad_use_fixed_caps (pad);
gst_pad_set_query_function (pad,
GST_DEBUG_FUNCPTR (gst_deinterleave_src_query));
gst_pad_set_active (pad, TRUE);
+ gst_pad_set_caps (pad, srccaps);
gst_element_add_pad (GST_ELEMENT (self), pad);
self->srcpads = g_list_prepend (self->srcpads, gst_object_ref (pad));
- if (self->keep_positions)
- gst_caps_unref (srccaps);
+ gst_caps_unref (srccaps);
}
gst_element_no_more_pads (GST_ELEMENT (self));
gst_deinterleave_set_pads_caps (GstDeinterleave * self, GstCaps * caps)
{
GList *l;
-
- GstStructure *s;
-
gint i;
for (l = self->srcpads, i = 0; l; l = l->next, i++) {
GstPad *pad = GST_PAD (l->data);
GstCaps *srccaps;
+ GstAudioInfo info;
+ gst_audio_info_from_caps (&info, caps);
+ if (self->keep_positions)
+ GST_AUDIO_INFO_POSITION (&info, i) =
+ GST_AUDIO_INFO_POSITION (&self->audio_info, i);
- /* Set channel position if we know it */
- if (self->keep_positions) {
- GstAudioChannelPosition pos[1] = { GST_AUDIO_CHANNEL_POSITION_NONE };
-
- srccaps = gst_caps_copy (caps);
- s = gst_caps_get_structure (srccaps, 0);
- if (self->pos)
- gst_audio_set_channel_positions (s, &self->pos[i]);
- else
- gst_audio_set_channel_positions (s, pos);
- } else {
- srccaps = caps;
- }
+ srccaps = gst_audio_info_to_caps (&info);
gst_pad_set_caps (pad, srccaps);
-
- if (self->keep_positions)
- gst_caps_unref (srccaps);
+ gst_caps_unref (srccaps);
}
}
g_list_free (self->srcpads);
self->srcpads = NULL;
- gst_pad_set_caps (self->sink, NULL);
gst_caps_replace (&self->sinkcaps, NULL);
}
static gboolean
-gst_deinterleave_set_process_function (GstDeinterleave * self, GstCaps * caps)
+gst_deinterleave_set_process_function (GstDeinterleave * self)
{
- GstStructure *s;
-
- s = gst_caps_get_structure (caps, 0);
- if (!gst_structure_get_int (s, "width", &self->width))
- return FALSE;
-
- switch (self->width) {
+ switch (GST_AUDIO_INFO_WIDTH (&self->audio_info)) {
case 8:
self->func = (GstDeinterleaveFunc) deinterleave_8;
break;
}
static gboolean
-gst_deinterleave_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_deinterleave_sink_setcaps (GstDeinterleave * self, GstCaps * caps)
{
- GstDeinterleave *self;
-
GstCaps *srccaps;
-
GstStructure *s;
- self = GST_DEINTERLEAVE (gst_pad_get_parent (pad));
-
GST_DEBUG_OBJECT (self, "got caps: %" GST_PTR_FORMAT, caps);
- if (self->sinkcaps && !gst_caps_is_equal (caps, self->sinkcaps)) {
- gint new_channels, i;
+ if (!gst_audio_info_from_caps (&self->audio_info, caps))
+ goto invalid_caps;
- GstAudioChannelPosition *pos;
+ if (!gst_deinterleave_set_process_function (self))
+ goto unsupported_caps;
+ if (self->sinkcaps && !gst_caps_is_equal (caps, self->sinkcaps)) {
+ gint i;
gboolean same_layout = TRUE;
-
- s = gst_caps_get_structure (caps, 0);
+ gboolean was_unpositioned;
+ gboolean is_unpositioned =
+ GST_AUDIO_INFO_IS_UNPOSITIONED (&self->audio_info);
+ gint new_channels = GST_AUDIO_INFO_CHANNELS (&self->audio_info);
+ gint old_channels;
+ GstAudioInfo old_info;
+
+ gst_audio_info_init (&old_info);
+ gst_audio_info_from_caps (&old_info, self->sinkcaps);
+ was_unpositioned = GST_AUDIO_INFO_IS_UNPOSITIONED (&old_info);
+ old_channels = GST_AUDIO_INFO_CHANNELS (&old_info);
/* We allow caps changes as long as the number of channels doesn't change
* and the channel positions stay the same. _getcaps() should've cared
* for this already but better be safe.
*/
- if (!gst_structure_get_int (s, "channels", &new_channels) ||
- new_channels != self->channels ||
- !gst_deinterleave_set_process_function (self, caps))
+ if (new_channels != old_channels ||
+ !gst_deinterleave_set_process_function (self))
goto cannot_change_caps;
/* Now check the channel positions. If we had no channel positions
* If we had channel positions and get different ones things have
* changed too of course
*/
- pos = gst_audio_get_channel_positions (s);
- if ((pos && !self->pos) || (!pos && self->pos))
+ if ((!was_unpositioned && is_unpositioned) || (was_unpositioned
+ && !is_unpositioned))
goto cannot_change_caps;
- if (pos) {
- for (i = 0; i < self->channels; i++) {
- if (self->pos[i] != pos[i]) {
+ if (!is_unpositioned) {
+ if (GST_AUDIO_INFO_CHANNELS (&old_info) !=
+ GST_AUDIO_INFO_CHANNELS (&self->audio_info))
+ goto cannot_change_caps;
+ for (i = 0; i < GST_AUDIO_INFO_CHANNELS (&old_info); i++) {
+ if (self->audio_info.position[i] != old_info.position[i]) {
same_layout = FALSE;
break;
}
}
- g_free (pos);
if (!same_layout)
goto cannot_change_caps;
}
- } else {
- s = gst_caps_get_structure (caps, 0);
-
- if (!gst_structure_get_int (s, "channels", &self->channels))
- goto no_channels;
-
- if (!gst_deinterleave_set_process_function (self, caps))
- goto unsupported_caps;
-
- self->pos = gst_audio_get_channel_positions (s);
}
gst_caps_replace (&self->sinkcaps, caps);
srccaps = gst_caps_copy (caps);
s = gst_caps_get_structure (srccaps, 0);
gst_structure_set (s, "channels", G_TYPE_INT, 1, NULL);
- gst_structure_remove_field (s, "channel-positions");
+ gst_structure_remove_field (s, "channel-mask");
/* If we already have pads, update the caps otherwise
* add new pads */
}
gst_caps_unref (srccaps);
- gst_object_unref (self);
return TRUE;
cannot_change_caps:
{
GST_ERROR_OBJECT (self, "can't set new caps: %" GST_PTR_FORMAT, caps);
- gst_object_unref (self);
return FALSE;
}
unsupported_caps:
{
GST_ERROR_OBJECT (self, "caps not supported: %" GST_PTR_FORMAT, caps);
- gst_object_unref (self);
return FALSE;
}
-no_channels:
+invalid_caps:
{
GST_ERROR_OBJECT (self, "invalid caps");
- gst_object_unref (self);
return FALSE;
}
}
size = gst_caps_get_size (caps);
for (i = 0; i < size; i++) {
s = gst_caps_get_structure (caps, i);
- gst_structure_remove_field (s, "channel-positions");
+ gst_structure_remove_field (s, "channel-mask");
gst_structure_remove_field (s, "channels");
}
}
}
static GstCaps *
-gst_deinterleave_sink_getcaps (GstPad * pad)
+gst_deinterleave_sink_getcaps (GstPad * pad, GstObject * parent,
+ GstCaps * filter)
{
- GstDeinterleave *self = GST_DEINTERLEAVE (gst_pad_get_parent (pad));
+ GstDeinterleave *self = GST_DEINTERLEAVE (parent);
GstCaps *ret;
if (pad == ourpad) {
if (GST_PAD_DIRECTION (pad) == GST_PAD_SINK)
- __set_channels (ourcaps, self->channels);
+ __set_channels (ourcaps, GST_AUDIO_INFO_CHANNELS (&self->audio_info));
else
__set_channels (ourcaps, 1);
} else {
* as otherwise gst_pad_peer_get_caps() might call
* back into this function and deadlock
*/
- peercaps = gst_pad_peer_get_caps (ourpad);
+ peercaps = gst_pad_peer_query_caps (ourpad, NULL);
+ peercaps = gst_caps_make_writable (peercaps);
}
/* If the peer exists and has caps add them to the intersection,
}
GST_OBJECT_UNLOCK (self);
- gst_object_unref (self);
-
GST_DEBUG_OBJECT (pad, "Intersected caps to %" GST_PTR_FORMAT, ret);
return ret;
}
static gboolean
-gst_deinterleave_sink_event (GstPad * pad, GstEvent * event)
+gst_deinterleave_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
- GstDeinterleave *self = GST_DEINTERLEAVE (gst_pad_get_parent (pad));
+ GstDeinterleave *self = GST_DEINTERLEAVE (parent);
gboolean ret;
case GST_EVENT_FLUSH_STOP:
case GST_EVENT_FLUSH_START:
case GST_EVENT_EOS:
- ret = gst_pad_event_default (pad, event);
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ ret = gst_deinterleave_sink_setcaps (self, caps);
+ gst_event_unref (event);
break;
+ }
+
default:
if (self->srcpads) {
- ret = gst_pad_event_default (pad, event);
+ ret = gst_pad_event_default (pad, parent, event);
} else {
GST_OBJECT_LOCK (self);
self->pending_events = g_list_append (self->pending_events, event);
break;
}
- gst_object_unref (self);
-
return ret;
}
static gboolean
-gst_deinterleave_src_query (GstPad * pad, GstQuery * query)
+gst_deinterleave_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
- GstDeinterleave *self = GST_DEINTERLEAVE (gst_pad_get_parent (pad));
+ GstDeinterleave *self = GST_DEINTERLEAVE (parent);
gboolean res;
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
if (res && GST_QUERY_TYPE (query) == GST_QUERY_DURATION) {
GstFormat format;
* to get the correct value. All other formats should be fine
*/
if (format == GST_FORMAT_BYTES && dur != -1)
- gst_query_set_duration (query, format, dur / self->channels);
+ gst_query_set_duration (query, format,
+ dur / GST_AUDIO_INFO_CHANNELS (&self->audio_info));
} else if (res && GST_QUERY_TYPE (query) == GST_QUERY_POSITION) {
GstFormat format;
* to get the correct value. All other formats should be fine
*/
if (format == GST_FORMAT_BYTES && pos != -1)
- gst_query_set_position (query, format, pos / self->channels);
+ gst_query_set_position (query, format,
+ pos / GST_AUDIO_INFO_CHANNELS (&self->audio_info));
+ } else if (res && GST_QUERY_TYPE (query) == GST_QUERY_CAPS) {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_deinterleave_sink_getcaps (pad, parent, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
}
- gst_object_unref (self);
return res;
}
{
GstFlowReturn ret = GST_FLOW_OK;
- guint channels = self->channels;
+ guint channels = GST_AUDIO_INFO_CHANNELS (&self->audio_info);
guint pads_pushed = 0, buffers_allocated = 0;
- guint nframes = GST_BUFFER_SIZE (buf) / channels / (self->width / 8);
+ guint nframes =
+ gst_buffer_get_size (buf) / channels /
+ (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);
- guint bufsize = nframes * (self->width / 8);
+ guint bufsize = nframes * (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);
guint i;
guint8 *in, *out;
+ GstMapInfo read_info;
+ gst_buffer_map (buf, &read_info, GST_MAP_READ);
+
/* Send any pending events to all src pads */
GST_OBJECT_LOCK (self);
if (self->pending_events) {
/* Allocate buffers */
for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) {
- GstPad *pad = (GstPad *) srcs->data;
-
- buffers_out[i] = NULL;
- ret =
- gst_pad_alloc_buffer (pad, GST_BUFFER_OFFSET_NONE, bufsize,
- GST_PAD_CAPS (pad), &buffers_out[i]);
+ buffers_out[i] = gst_buffer_new_allocate (NULL, bufsize, 0);
/* Make sure we got a correct buffer. The only other case we allow
* here is an unliked pad */
- if (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED)
+ if (!buffers_out[i])
goto alloc_buffer_failed;
- else if (buffers_out[i] && GST_BUFFER_SIZE (buffers_out[i]) != bufsize)
+ else if (buffers_out[i] && gst_buffer_get_size (buffers_out[i]) != bufsize)
goto alloc_buffer_bad_size;
- else if (buffers_out[i] &&
- !gst_caps_is_equal (GST_BUFFER_CAPS (buffers_out[i]),
- GST_PAD_CAPS (pad)))
- goto invalid_caps;
if (buffers_out[i]) {
- gst_buffer_copy_metadata (buffers_out[i], buf,
- GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS);
+ gst_buffer_copy_into (buffers_out[i], buf, GST_BUFFER_COPY_METADATA, 0,
+ -1);
buffers_allocated++;
}
}
/* deinterleave */
for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) {
GstPad *pad = (GstPad *) srcs->data;
+ GstMapInfo write_info;
+
- in = (guint8 *) GST_BUFFER_DATA (buf);
- in += i * (self->width / 8);
+ in = (guint8 *) read_info.data;
+ in += i * (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);
if (buffers_out[i]) {
- out = (guint8 *) GST_BUFFER_DATA (buffers_out[i]);
+ gst_buffer_map (buffers_out[i], &write_info, GST_MAP_WRITE);
+
+ out = (guint8 *) write_info.data;
self->func (out, in, channels, nframes);
+ gst_buffer_unmap (buffers_out[i], &write_info);
+
ret = gst_pad_push (pad, buffers_out[i]);
buffers_out[i] = NULL;
if (ret == GST_FLOW_OK)
ret = GST_FLOW_NOT_LINKED;
done:
+ gst_buffer_unmap (buf, &read_info);
gst_buffer_unref (buf);
g_free (buffers_out);
return ret;
ret = GST_FLOW_NOT_NEGOTIATED;
goto clean_buffers;
}
-invalid_caps:
- {
- GST_WARNING ("called alloc_buffer(), but didn't get requested caps");
- ret = GST_FLOW_NOT_NEGOTIATED;
- goto clean_buffers;
- }
push_failed:
{
GST_DEBUG ("push() failed, flow = %s", gst_flow_get_name (ret));
}
clean_buffers:
{
+ gst_buffer_unmap (buf, &read_info);
for (i = 0; i < channels; i++) {
if (buffers_out[i])
gst_buffer_unref (buffers_out[i]);
}
static GstFlowReturn
-gst_deinterleave_chain (GstPad * pad, GstBuffer * buffer)
+gst_deinterleave_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
- GstDeinterleave *self = GST_DEINTERLEAVE (GST_PAD_PARENT (pad));
+ GstDeinterleave *self = GST_DEINTERLEAVE (parent);
GstFlowReturn ret;
g_return_val_if_fail (self->func != NULL, GST_FLOW_NOT_NEGOTIATED);
- g_return_val_if_fail (self->width > 0, GST_FLOW_NOT_NEGOTIATED);
- g_return_val_if_fail (self->channels > 0, GST_FLOW_NOT_NEGOTIATED);
+ g_return_val_if_fail (GST_AUDIO_INFO_WIDTH (&self->audio_info) > 0,
+ GST_FLOW_NOT_NEGOTIATED);
+ g_return_val_if_fail (GST_AUDIO_INFO_CHANNELS (&self->audio_info) > 0,
+ GST_FLOW_NOT_NEGOTIATED);
ret = gst_deinterleave_process (self, buffer);
return ret;
}
-static gboolean
-gst_deinterleave_sink_activate_push (GstPad * pad, gboolean active)
+static GstStateChangeReturn
+gst_deinterleave_change_state (GstElement * element, GstStateChange transition)
{
- GstDeinterleave *self = GST_DEINTERLEAVE (gst_pad_get_parent (pad));
-
- /* Reset everything when the pad is deactivated */
- if (!active) {
- gst_deinterleave_remove_pads (self);
- if (self->pos) {
- g_free (self->pos);
- self->pos = NULL;
- }
- self->channels = 0;
- self->width = 0;
- self->func = NULL;
-
- if (self->pending_events) {
- g_list_foreach (self->pending_events, (GFunc) gst_mini_object_unref,
- NULL);
- g_list_free (self->pending_events);
- self->pending_events = NULL;
- }
+ GstStateChangeReturn ret;
+ GstDeinterleave *self = GST_DEINTERLEAVE (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_deinterleave_remove_pads (self);
+
+ self->func = NULL;
+
+ if (self->pending_events) {
+ g_list_foreach (self->pending_events, (GFunc) gst_mini_object_unref,
+ NULL);
+ g_list_free (self->pending_events);
+ self->pending_events = NULL;
+ }
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ default:
+ break;
}
- gst_object_unref (self);
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
- return TRUE;
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_deinterleave_remove_pads (self);
+
+ self->func = NULL;
+
+ if (self->pending_events) {
+ g_list_foreach (self->pending_events, (GFunc) gst_mini_object_unref,
+ NULL);
+ g_list_free (self->pending_events);
+ self->pending_events = NULL;
+ }
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+ return ret;
}
G_BEGIN_DECLS
#include <gst/gst.h>
-#include <gst/audio/multichannel.h>
+#include <gst/audio/audio.h>
#define GST_TYPE_DEINTERLEAVE (gst_deinterleave_get_type())
#define GST_DEINTERLEAVE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DEINTERLEAVE,GstDeinterleave))
/*< private > */
GList *srcpads;
GstCaps *sinkcaps;
- gint channels;
- GstAudioChannelPosition *pos;
+ GstAudioInfo audio_info;
gboolean keep_positions;
GstPad *sink;
- gint width;
GstDeinterleaveFunc func;
GList *pending_events;
GST_DEBUG_CATEGORY_STATIC (gst_interleave_debug);
#define GST_CAT_DEFAULT gst_interleave_debug
-static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink%d",
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("audio/x-raw-int, "
"Andy Wingo <wingo at pobox.com>, "
"Sebastian Dröge <slomo@circular-chaos.org>");
- gst_element_class_add_static_pad_template (g_class, &sink_template);
- gst_element_class_add_static_pad_template (g_class, &src_template);
+ gst_element_class_add_pad_template (g_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (g_class,
+ gst_static_pad_template_get (&src_template));
}
static void
if (templ->direction != GST_PAD_SINK)
goto not_sink_pad;
-#if GLIB_CHECK_VERSION(2,29,5)
channels = g_atomic_int_add (&self->channels, 1);
padnumber = g_atomic_int_add (&self->padcounter, 1);
-#else
- channels = g_atomic_int_exchange_and_add (&self->channels, 1);
- padnumber = g_atomic_int_exchange_and_add (&self->padcounter, 1);
-#endif
- pad_name = g_strdup_printf ("sink%d", padnumber);
+ pad_name = g_strdup_printf ("sink_%u", padnumber);
new_pad = GST_PAD_CAST (g_object_new (GST_TYPE_INTERLEAVE_PAD,
"name", pad_name, "direction", templ->direction,
"template", templ, NULL));
GST_DEBUG_OBJECT (self, "no data available, must be EOS");
gst_buffer_unref (outbuf);
gst_pad_push_event (self->src, gst_event_new_eos ());
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
}
static gboolean
plugin_init (GstPlugin * plugin)
{
+#if 0
if (!gst_element_register (plugin, "interleave",
GST_RANK_NONE, gst_interleave_get_type ()) ||
- !gst_element_register (plugin, "deinterleave",
- GST_RANK_NONE, gst_deinterleave_get_type ()))
- return FALSE;
-
- return TRUE;
-}
+#endif
+ if (!gst_element_register (plugin, "deinterleave",
+ GST_RANK_NONE, gst_deinterleave_get_type ()))
+ return FALSE; return TRUE;}
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
- GST_VERSION_MINOR,
- "interleave",
- "Audio interleaver/deinterleaver",
- plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
+ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "interleave",
+ "Audio interleaver/deinterleaver",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
#include <gst/gst.h>
+#if 0
#include "interleave.h"
+#endif
#include "deinterleave.h"
#endif /* __GST_PLUGIN_INTERLEAVE_H__ */
$(GST_PLUGINS_BASE_LIBS) \
-lgstriff-@GST_MAJORMINOR@ \
-lgstaudio-@GST_MAJORMINOR@ \
+ -lgstvideo-@GST_MAJORMINOR@ \
-lgstrtp-@GST_MAJORMINOR@ \
-lgsttag-@GST_MAJORMINOR@ \
-lgstpbutils-@GST_MAJORMINOR@ \
atom_data_new_from_gst_buffer (guint32 fourcc, const GstBuffer * buf)
{
AtomData *data = atom_data_new (fourcc);
+ gsize size = gst_buffer_get_size ((GstBuffer *) buf);
- atom_data_alloc_mem (data, GST_BUFFER_SIZE (buf));
- g_memmove (data->data, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ atom_data_alloc_mem (data, size);
+ gst_buffer_extract ((GstBuffer *) buf, 0, data->data, size);
return data;
}
}
}
+static GstBuffer *
+_gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
+{
+ GstBuffer *buf;
+
+ buf = gst_buffer_new ();
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (free_func ? 0 : GST_MEMORY_FLAG_READONLY,
+ mem, free_func, size, 0, size));
+
+ return buf;
+}
+
void
atom_moov_add_blob_tag (AtomMOOV * moov, guint8 * data, guint size)
{
if (len > size)
return;
- buf = gst_buffer_new ();
- GST_BUFFER_SIZE (buf) = len - 8;
- GST_BUFFER_DATA (buf) = data + 8;
-
+ buf = _gst_buffer_new_wrapped (data + 8, len - 8, NULL);
data_atom = atom_data_new_from_gst_buffer (fourcc, buf);
gst_buffer_unref (buf);
guint8 *bdata;
/* need full atom */
- buf = gst_buffer_new_and_alloc (size + 4);
- bdata = GST_BUFFER_DATA (buf);
+ bdata = g_malloc (size + 4);
/* full atom: version and flags */
GST_WRITE_UINT32_BE (bdata, 0);
memcpy (bdata + 4, data, size);
+ buf = _gst_buffer_new_wrapped (bdata, size + 4, g_free);
data_atom = atom_data_new_from_gst_buffer (fourcc, buf);
gst_buffer_unref (buf);
GstBuffer *buf;
guint8 *data;
- buf = gst_buffer_new_and_alloc (8);
- data = GST_BUFFER_DATA (buf);
-
+ data = g_malloc (8);
/* ihdr = image header box */
GST_WRITE_UINT32_BE (data, par_width);
GST_WRITE_UINT32_BE (data + 4, par_height);
+ buf = _gst_buffer_new_wrapped (data, 8, g_free);
atom_data = atom_data_new_from_gst_buffer (FOURCC_pasp, buf);
gst_buffer_unref (buf);
/* optional DecoderSpecificInfo */
if (codec_data) {
DecoderSpecificInfoDescriptor *desc;
+ gsize size;
esds->es.dec_conf_desc.dec_specific_info = desc =
desc_dec_specific_info_new ();
- desc_dec_specific_info_alloc_data (desc, GST_BUFFER_SIZE (codec_data));
-
- memcpy (desc->data, GST_BUFFER_DATA (codec_data),
- GST_BUFFER_SIZE (codec_data));
+ size = gst_buffer_get_size ((GstBuffer *) codec_data);
+ desc_dec_specific_info_alloc_data (desc, size);
+ gst_buffer_extract ((GstBuffer *) codec_data, 0, desc->data, size);
}
return build_atom_info_wrapper ((Atom *) esds, atom_esds_copy_data,
{
AtomData *atom_data;
GstBuffer *buf;
+ guint8 *data;
- buf = gst_buffer_new_and_alloc (12);
-
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf), buffer_size_db);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + 4, max_bitrate);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + 8, avg_bitrate);
+ data = g_malloc (12);
+ GST_WRITE_UINT32_BE (data, buffer_size_db);
+ GST_WRITE_UINT32_BE (data + 4, max_bitrate);
+ GST_WRITE_UINT32_BE (data + 8, avg_bitrate);
+ buf = _gst_buffer_new_wrapped (data, 12, g_free);
atom_data = atom_data_new_from_gst_buffer (FOURCC_btrt, buf);
gst_buffer_unref (buf);
{
AtomInfo *esds, *mp4a;
GstBuffer *buf;
+ guint32 tmp = 0;
/* Add ESDS atom to WAVE */
esds = build_esds_extension (trak, ESDS_OBJECT_TYPE_MPEG4_P3,
/* Add MP4A atom to the WAVE:
* not really in spec, but makes offset based players happy */
- buf = gst_buffer_new_and_alloc (4);
- *((guint32 *) GST_BUFFER_DATA (buf)) = 0;
+ buf = _gst_buffer_new_wrapped (&tmp, 4, NULL);
mp4a = build_codec_data_extension (FOURCC_mp4a, buf);
gst_buffer_unref (buf);
{
AtomData *atom_data;
GstBuffer *buf;
+ guint8 f = fields;
if (fields == 1) {
return NULL;
}
- buf = gst_buffer_new_and_alloc (1);
- GST_BUFFER_DATA (buf)[0] = (guint8) fields;
-
+ buf = _gst_buffer_new_wrapped (&f, 1, NULL);
atom_data =
atom_data_new_from_gst_buffer (GST_MAKE_FOURCC ('f', 'i', 'e', 'l'), buf);
gst_buffer_unref (buf);
}
AtomInfo *
-build_jp2h_extension (AtomTRAK * trak, gint width, gint height, guint32 fourcc,
- gint ncomp, const GValue * cmap_array, const GValue * cdef_array)
+build_jp2h_extension (AtomTRAK * trak, gint width, gint height,
+ const gchar * colorspace, gint ncomp, const GValue * cmap_array,
+ const GValue * cdef_array)
{
AtomData *atom_data;
GstBuffer *buf;
g_return_val_if_fail (cdef_array == NULL ||
GST_VALUE_HOLDS_ARRAY (cdef_array), NULL);
- if (fourcc == GST_MAKE_FOURCC ('s', 'R', 'G', 'B')) {
+ if (g_str_equal (colorspace, "sRGB")) {
cenum = 0x10;
if (ncomp == 0)
ncomp = 3;
- } else if (fourcc == GST_MAKE_FOURCC ('G', 'R', 'A', 'Y')) {
+ } else if (g_str_equal (colorspace, "GRAY")) {
cenum = 0x11;
if (ncomp == 0)
ncomp = 1;
- } else if (fourcc == GST_MAKE_FOURCC ('s', 'Y', 'U', 'V')) {
+ } else if (g_str_equal (colorspace, "sYUV")) {
cenum = 0x12;
if (ncomp == 0)
ncomp = 3;
cdef_size = 8 + 2 + cdef_array_size * 6;
}
- buf = gst_buffer_new_and_alloc (idhr_size + colr_size + cmap_size +
- cdef_size);
- gst_byte_writer_init_with_buffer (&writer, buf, FALSE);
+ gst_byte_writer_init_with_size (&writer,
+ idhr_size + colr_size + cmap_size + cdef_size, TRUE);
/* ihdr = image header box */
gst_byte_writer_put_uint32_be (&writer, 22);
}
g_assert (gst_byte_writer_get_remaining (&writer) == 0);
+ buf = gst_byte_writer_reset_and_get_buffer (&writer);
atom_data = atom_data_new_from_gst_buffer (FOURCC_jp2h, buf);
gst_buffer_unref (buf);
GstBuffer *buf;
AtomInfo *res;
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = ext;
- GST_BUFFER_SIZE (buf) = sizeof (ext);
-
/* vendor */
GST_WRITE_UINT32_LE (ext, 0);
/* decoder version */
/* frames per sample */
GST_WRITE_UINT8 (ext + 8, 1);
+ buf = _gst_buffer_new_wrapped (ext, sizeof (ext), NULL);
res = build_codec_data_extension (GST_MAKE_FOURCC ('d', 'a', 'm', 'r'), buf);
gst_buffer_unref (buf);
return res;
GstBuffer *buf;
AtomInfo *res;
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = ext;
- GST_BUFFER_SIZE (buf) = sizeof (ext);
-
/* vendor */
GST_WRITE_UINT32_LE (ext, 0);
/* decoder version */
GST_WRITE_UINT8 (ext + 5, 10);
GST_WRITE_UINT8 (ext + 6, 0);
+ buf = _gst_buffer_new_wrapped (ext, sizeof (ext), NULL);
res = build_codec_data_extension (GST_MAKE_FOURCC ('d', '2', '6', '3'), buf);
gst_buffer_unref (buf);
return res;
/* convert to uint32 from fixed point */
gamma_fp = (guint32) 65536 *gamma;
- buf = gst_buffer_new_and_alloc (4);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf), gamma_fp);
+ gamma_fp = GUINT32_TO_BE (gamma_fp);
+ buf = _gst_buffer_new_wrapped (&gamma_fp, 4, NULL);
res = build_codec_data_extension (FOURCC_gama, buf);
gst_buffer_unref (buf);
return res;
{
AtomInfo *res;
GstBuffer *buf;
+ gsize size;
+ guint8 *data;
/* the seqh plus its size and fourcc */
- buf = gst_buffer_new_and_alloc (GST_BUFFER_SIZE (seqh) + 8);
+ size = gst_buffer_get_size ((GstBuffer *) seqh);
+ data = g_malloc (size + 8);
- GST_WRITE_UINT32_LE (GST_BUFFER_DATA (buf), FOURCC_SEQH);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + 4, GST_BUFFER_SIZE (seqh));
- memcpy (GST_BUFFER_DATA (buf) + 8, GST_BUFFER_DATA (seqh),
- GST_BUFFER_SIZE (seqh));
+ GST_WRITE_UINT32_LE (data, FOURCC_SEQH);
+ GST_WRITE_UINT32_BE (data + 4, size + 8);
+ gst_buffer_extract ((GstBuffer *) seqh, 0, data + 8, size);
+ buf = _gst_buffer_new_wrapped (data, size + 8, g_free);
res = build_codec_data_extension (FOURCC_SMI_, buf);
gst_buffer_unref (buf);
return res;
within the WAVE header (below), it's little endian. */
fourcc = MS_WAVE_FOURCC (0x11);
- buf = gst_buffer_new_and_alloc (ima_adpcm_atom_size);
- data = GST_BUFFER_DATA (buf);
+ data = g_malloc (ima_adpcm_atom_size);
/* This atom's content is a WAVE header, including 2 bytes of extra data.
Note that all of this is little-endian, unlike most stuff in qt. */
GST_WRITE_UINT16_LE (data + 16, 2); /* Two extra bytes */
GST_WRITE_UINT16_LE (data + 18, samplesperblock);
+ buf = _gst_buffer_new_wrapped (data, ima_adpcm_atom_size, g_free);
atom_data = atom_data_new_from_gst_buffer (fourcc, buf);
gst_buffer_unref (buf);
build_uuid_xmp_atom (GstBuffer * xmp_data)
{
AtomUUID *uuid;
+ gsize size;
static guint8 xmp_uuid[] = { 0xBE, 0x7A, 0xCF, 0xCB,
0x97, 0xA9, 0x42, 0xE8,
0x9C, 0x71, 0x99, 0x94,
uuid = atom_uuid_new ();
memcpy (uuid->uuid, xmp_uuid, 16);
- uuid->data = g_malloc (GST_BUFFER_SIZE (xmp_data));
- uuid->datalen = GST_BUFFER_SIZE (xmp_data);
- memcpy (uuid->data, GST_BUFFER_DATA (xmp_data), GST_BUFFER_SIZE (xmp_data));
+ size = gst_buffer_get_size (xmp_data);
+ uuid->data = g_malloc (size);
+ uuid->datalen = size;
+ gst_buffer_extract (xmp_data, 0, uuid->data, size);
return build_atom_info_wrapper ((Atom *) uuid, atom_uuid_copy_data,
atom_uuid_free);
AtomInfo * build_btrt_extension (guint32 buffer_size_db, guint32 avg_bitrate,
guint32 max_bitrate);
AtomInfo * build_jp2h_extension (AtomTRAK * trak, gint width, gint height,
- guint32 fourcc, gint ncomp,
+ const gchar *colorspace, gint ncomp,
const GValue * cmap_array,
const GValue * cdef_array);
guint64 size = 0;
if (prefix) {
- if (fwrite (GST_BUFFER_DATA (prefix), 1, GST_BUFFER_SIZE (prefix), f) !=
- GST_BUFFER_SIZE (prefix)) {
+ GstMapInfo map;
+
+ gst_buffer_map (prefix, &map, GST_MAP_READ);
+ if (fwrite (map.data, 1, map.size, f) != map.size) {
+ gst_buffer_unmap (prefix, &map);
return FALSE;
}
+ gst_buffer_unmap (prefix, &map);
}
if (!atom_ftyp_copy_data (ftyp, &data, &size, &offset)) {
return FALSE;
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <glib/gstdio.h>
#include <gst/gst.h>
PROP_FAST_START_MODE
};
-GST_BOILERPLATE (GstQTMoovRecover, gst_qt_moov_recover, GstPipeline,
- GST_TYPE_PIPELINE);
+#define gst_qt_moov_recover_parent_class parent_class
+G_DEFINE_TYPE (GstQTMoovRecover, gst_qt_moov_recover, GST_TYPE_PIPELINE);
/* property functions */
static void gst_qt_moov_recover_set_property (GObject * object,
static void gst_qt_moov_recover_finalize (GObject * object);
static void
-gst_qt_moov_recover_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-#if 0
- GstQTMoovRecoverClass *klass = (GstQTMoovRecoverClass *) g_class;
-#endif
- gst_element_class_set_details_simple (element_class, "QT Moov Recover",
- "Util", "Recovers unfinished qtmux files",
- "Thiago Santos <thiago.sousa.santos@collabora.co.uk>");
-}
-
-static void
gst_qt_moov_recover_class_init (GstQTMoovRecoverClass * klass)
{
GObjectClass *gobject_class;
GST_DEBUG_CATEGORY_INIT (gst_qt_moov_recover_debug, "qtmoovrecover", 0,
"QT Moovie Recover");
+
+ gst_element_class_set_details_simple (gstelement_class, "QT Moov Recover",
+ "Util", "Recovers unfinished qtmux files",
+ "Thiago Santos <thiago.sousa.santos@collabora.co.uk>");
}
static void
-gst_qt_moov_recover_init (GstQTMoovRecover * qtmr,
- GstQTMoovRecoverClass * qtmr_klass)
+gst_qt_moov_recover_init (GstQTMoovRecover * qtmr)
{
}
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
- qtmr->task = gst_task_create (gst_qt_moov_recover_run, qtmr);
- g_static_rec_mutex_init (&qtmr->task_mutex);
+ qtmr->task = gst_task_new (gst_qt_moov_recover_run, qtmr);
+ g_rec_mutex_init (&qtmr->task_mutex);
gst_task_set_lock (qtmr->task, &qtmr->task_mutex);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
g_assert (gst_task_get_state (qtmr->task) == GST_TASK_STOPPED);
gst_object_unref (qtmr->task);
qtmr->task = NULL;
- g_static_rec_mutex_free (&qtmr->task_mutex);
+ g_rec_mutex_clear (&qtmr->task_mutex);
break;
default:
break;
GstPipeline pipeline;
GstTask *task;
- GStaticRecMutex task_mutex;
+ GRecMutex task_mutex;
/* properties */
gboolean faststart_mode;
* <refsect2>
* <title>Example pipelines</title>
* |[
- * gst-launch v4l2src num-buffers=500 ! video/x-raw-yuv,width=320,height=240 ! ffmpegcolorspace ! qtmux ! filesink location=video.mov
+ * gst-launch v4l2src num-buffers=500 ! video/x-raw,width=320,height=240 ! ffmpegcolorspace ! qtmux ! filesink location=video.mov
* ]|
* Records a video stream captured from a v4l2 device and muxes it into a qt file.
* </refsect2>
#include <glib/gstdio.h>
#include <gst/gst.h>
-#include <gst/base/gstcollectpads.h>
+#include <gst/base/gstcollectpads2.h>
+#include <gst/audio/audio.h>
+#include <gst/video/video.h>
#include <gst/tag/xmpwriter.h>
#include <sys/types.h>
/* pad functions */
static GstPad *gst_qt_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_qt_mux_release_pad (GstElement * element, GstPad * pad);
/* event */
srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
GST_PAD_ALWAYS, params->src_caps);
gst_element_class_add_pad_template (element_class, srctempl);
- gst_object_unref (srctempl);
if (params->audio_sink_caps) {
- audiosinktempl = gst_pad_template_new ("audio_%d",
+ audiosinktempl = gst_pad_template_new ("audio_%u",
GST_PAD_SINK, GST_PAD_REQUEST, params->audio_sink_caps);
gst_element_class_add_pad_template (element_class, audiosinktempl);
- gst_object_unref (audiosinktempl);
}
if (params->video_sink_caps) {
- videosinktempl = gst_pad_template_new ("video_%d",
+ videosinktempl = gst_pad_template_new ("video_%u",
GST_PAD_SINK, GST_PAD_REQUEST, params->video_sink_caps);
gst_element_class_add_pad_template (element_class, videosinktempl);
- gst_object_unref (videosinktempl);
}
klass->format = params->prop->format;
GstQTMux * qtmux)
{
GstBuffer *newbuf;
+ GstMapInfo map;
+ gsize size;
GST_LOG_OBJECT (qtmux, "Preparing jpc buffer");
if (buf == NULL)
return NULL;
- newbuf = gst_buffer_new_and_alloc (GST_BUFFER_SIZE (buf) + 8);
- gst_buffer_copy_metadata (newbuf, buf, GST_BUFFER_COPY_ALL);
+ size = gst_buffer_get_size (buf);
+ newbuf = gst_buffer_new_and_alloc (size + 8);
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_ALL, 8, size);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (newbuf), GST_BUFFER_SIZE (newbuf));
- GST_WRITE_UINT32_LE (GST_BUFFER_DATA (newbuf) + 4, FOURCC_jp2c);
+ gst_buffer_map (newbuf, &map, GST_MAP_WRITE);
+ GST_WRITE_UINT32_BE (map.data, map.size);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_jp2c);
- memcpy (GST_BUFFER_DATA (newbuf) + 8, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return newbuf;
GDateDay day;
gchar *str;
- g_return_if_fail (gst_tag_get_type (tag) == GST_TYPE_DATE);
+ g_return_if_fail (gst_tag_get_type (tag) == G_TYPE_DATE);
if (!gst_tag_list_get_date (list, tag, &date) || !date)
return;
GstCaps *caps;
GstStructure *structure;
gint flags = 0;
+ GstMapInfo map;
g_return_if_fail (gst_tag_get_type (tag) == GST_TYPE_BUFFER);
if (!buf)
goto done;
- caps = gst_buffer_get_caps (buf);
+ /* FIXME-0.11 caps metadata ? */
+ /* caps = gst_buffer_get_caps (buf); */
+ caps = NULL;
if (!caps) {
GST_WARNING_OBJECT (qtmux, "preview image without caps");
goto done;
goto done;
}
+ gst_buffer_map (buf, &map, GST_MAP_READ);
GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT
- " -> image size %d", GST_FOURCC_ARGS (fourcc), GST_BUFFER_SIZE (buf));
- atom_moov_add_tag (qtmux->moov, fourcc, flags, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ " -> image size %" G_GSIZE_FORMAT "", GST_FOURCC_ARGS (fourcc), map.size);
+ atom_moov_add_tag (qtmux->moov, fourcc, flags, map.data, map.size);
+ gst_buffer_unmap (buf, &map);
done:
g_value_unset (&value);
}
GDate *date = NULL;
GDateYear year;
- g_return_if_fail (gst_tag_get_type (tag) == GST_TYPE_DATE);
+ g_return_if_fail (gst_tag_get_type (tag) == G_TYPE_DATE);
if (!gst_tag_list_get_date (list, tag, &date) || !date)
return;
GstCaps *caps = NULL;
val = gst_tag_list_get_value_index (list, GST_QT_DEMUX_PRIVATE_TAG, i);
- buf = (GstBuffer *) gst_value_get_mini_object (val);
+ buf = (GstBuffer *) gst_value_get_buffer (val);
- if (buf && (caps = gst_buffer_get_caps (buf))) {
+ /* FIXME-0.11 */
+ if (buf && (caps = NULL /*gst_buffer_get_caps (buf) */ )) {
GstStructure *s;
const gchar *style = NULL;
+ GstMapInfo map;
- GST_DEBUG_OBJECT (qtmux, "Found private tag %d/%d; size %d, caps %"
- GST_PTR_FORMAT, i, num_tags, GST_BUFFER_SIZE (buf), caps);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ GST_DEBUG_OBJECT (qtmux,
+ "Found private tag %d/%d; size %" G_GSIZE_FORMAT ", caps %"
+ GST_PTR_FORMAT, i, num_tags, map.size, caps);
s = gst_caps_get_structure (caps, 0);
if (s && (style = gst_structure_get_string (s, "style"))) {
/* try to prevent some style tag ending up into another variant
(strcmp (style, "iso") == 0 &&
qtmux_klass->format == GST_QT_MUX_FORMAT_3GP)) {
GST_DEBUG_OBJECT (qtmux, "Adding private tag");
- atom_moov_add_blob_tag (qtmux->moov, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ atom_moov_add_blob_tag (qtmux->moov, map.data, map.size);
}
}
+ gst_buffer_unmap (buf, &map);
gst_caps_unref (caps);
}
}
GstBuffer *buf;
buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = GST_BUFFER_MALLOCDATA (buf) = data;
- GST_BUFFER_SIZE (buf) = size;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
return buf;
}
gboolean mind_fast)
{
GstFlowReturn res;
- guint8 *data;
- guint size;
+ gsize size;
g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
-
- GST_LOG_OBJECT (qtmux, "sending buffer size %d", size);
+ size = gst_buffer_get_size (buf);
+ GST_LOG_OBJECT (qtmux, "sending buffer size %" G_GSIZE_FORMAT, size);
if (mind_fast && qtmux->fast_start_file) {
+ GstMapInfo map;
gint ret;
GST_LOG_OBJECT (qtmux, "to temporary file");
- ret = fwrite (data, sizeof (guint8), size, qtmux->fast_start_file);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ ret = fwrite (map.data, sizeof (guint8), map.size, qtmux->fast_start_file);
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
if (ret != size)
goto write_error;
res = GST_FLOW_OK;
} else {
GST_LOG_OBJECT (qtmux, "downstream");
-
- buf = gst_buffer_make_metadata_writable (buf);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (qtmux->srcpad));
res = gst_pad_push (qtmux->srcpad, buf);
}
* (somehow optimize copy?) */
GST_DEBUG_OBJECT (qtmux, "Sending buffered data");
while (ret == GST_FLOW_OK) {
- gint r;
const int bufsize = 4096;
+ GstMapInfo map;
+ gsize size;
buf = gst_buffer_new_and_alloc (bufsize);
- r = fread (GST_BUFFER_DATA (buf), sizeof (guint8), bufsize,
- qtmux->fast_start_file);
- if (r == 0)
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ size = fread (map.data, sizeof (guint8), bufsize, qtmux->fast_start_file);
+ if (size == 0) {
+ gst_buffer_unmap (buf, &map);
break;
- GST_BUFFER_SIZE (buf) = r;
- GST_LOG_OBJECT (qtmux, "Pushing buffered buffer of size %d", r);
+ }
+ GST_LOG_OBJECT (qtmux, "Pushing buffered buffer of size %d",
+ (gint) map.size);
+ gst_buffer_unmap (buf, &map);
ret = gst_qt_mux_send_buffer (qtmux, buf, offset, FALSE);
buf = NULL;
}
gst_qt_mux_update_mdat_size (GstQTMux * qtmux, guint64 mdat_pos,
guint64 mdat_size, guint64 * offset)
{
- GstEvent *event;
GstBuffer *buf;
gboolean large_file;
+ GstSegment segment;
+ GstMapInfo map;
large_file = (mdat_size > MDAT_LARGE_FILE_LIMIT);
mdat_pos += 8;
/* seek and rewrite the header */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- mdat_pos, GST_CLOCK_TIME_NONE, 0);
- gst_pad_push_event (qtmux->srcpad, event);
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = mdat_pos;
+ gst_pad_push_event (qtmux->srcpad, gst_event_new_segment (&segment));
if (large_file) {
buf = gst_buffer_new_and_alloc (sizeof (guint64));
- GST_WRITE_UINT64_BE (GST_BUFFER_DATA (buf), mdat_size + 16);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ GST_WRITE_UINT64_BE (map.data, mdat_size + 16);
} else {
- guint8 *data;
-
buf = gst_buffer_new_and_alloc (16);
- data = GST_BUFFER_DATA (buf);
- GST_WRITE_UINT32_BE (data, 8);
- GST_WRITE_UINT32_LE (data + 4, FOURCC_free);
- GST_WRITE_UINT32_BE (data + 8, mdat_size + 8);
- GST_WRITE_UINT32_LE (data + 12, FOURCC_mdat);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ GST_WRITE_UINT32_BE (map.data, 8);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_free);
+ GST_WRITE_UINT32_BE (map.data + 8, mdat_size + 8);
+ GST_WRITE_UINT32_LE (map.data + 12, FOURCC_mdat);
}
+ gst_buffer_unmap (buf, &map);
return gst_qt_mux_send_buffer (qtmux, buf, offset, FALSE);
}
GstStructure *structure;
GValue array = { 0 };
GValue value = { 0 };
- GstCaps *caps;
+ GstCaps *caps, *tcaps;
+
+ tcaps = gst_pad_get_current_caps (mux->srcpad);
+ caps = gst_caps_copy (tcaps);
+ gst_caps_unref (tcaps);
- caps = gst_caps_copy (GST_PAD_CAPS (mux->srcpad));
structure = gst_caps_get_structure (caps, 0);
g_value_init (&array, GST_TYPE_ARRAY);
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
g_value_init (&value, GST_TYPE_BUFFER);
gst_value_take_buffer (&value, gst_buffer_ref (buf));
gst_value_array_append_value (&array, &value);
{
GstFlowReturn ret = GST_FLOW_OK;
GstCaps *caps;
+ GstSegment segment;
GST_DEBUG_OBJECT (qtmux, "starting file");
gst_caps_unref (caps);
/* let downstream know we think in BYTES and expect to do seeking later on */
- gst_pad_push_event (qtmux->srcpad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ gst_pad_push_event (qtmux->srcpad, gst_event_new_segment (&segment));
/* initialize our moov recovery file */
GST_OBJECT_LOCK (qtmux);
}
if (qtmux->fragment_sequence) {
- GstEvent *event;
+ GstSegment segment;
if (qtmux->mfra) {
guint8 *data = NULL;
return GST_FLOW_OK;
}
-
timescale = qtmux->timescale;
/* only mvex duration is updated,
* mvhd should be consistent with empty moov
GST_DEBUG_OBJECT (qtmux, "rewriting moov with mvex duration %"
GST_TIME_FORMAT, GST_TIME_ARGS (first_ts));
/* seek and rewrite the header */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- qtmux->mdat_pos, GST_CLOCK_TIME_NONE, 0);
- gst_pad_push_event (qtmux->srcpad, event);
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = qtmux->mdat_pos;
+ gst_pad_push_event (qtmux->srcpad, gst_event_new_segment (&segment));
/* no need to seek back */
return gst_qt_mux_send_moov (qtmux, NULL, FALSE);
}
pad->traf = NULL;
atom_moof_copy_data (moof, &data, &size, &offset);
buffer = _gst_buffer_new_take_data (data, offset);
- GST_LOG_OBJECT (qtmux, "writing moof size %d", GST_BUFFER_SIZE (buffer));
+ GST_LOG_OBJECT (qtmux, "writing moof size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buffer));
ret = gst_qt_mux_send_buffer (qtmux, buffer, &qtmux->header_size, FALSE);
/* and actual data */
total_size = 0;
for (i = 0; i < atom_array_get_len (&pad->fragment_buffers); i++) {
total_size +=
- GST_BUFFER_SIZE (atom_array_index (&pad->fragment_buffers, i));
+ gst_buffer_get_size (atom_array_index (&pad->fragment_buffers, i));
}
GST_LOG_OBJECT (qtmux, "writing %d buffers, total_size %d",
buf = pad->buf_entries[pad->buf_head];
pad->buf_entries[pad->buf_head++] = NULL;
pad->buf_head %= wrap;
- buf = gst_buffer_make_metadata_writable (buf);
+ buf = gst_buffer_make_writable (buf);
/* track original ts (= pts ?) for later */
GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_TIMESTAMP (buf);
GST_BUFFER_TIMESTAMP (buf) = ts;
if (G_LIKELY (buf != NULL && GST_CLOCK_TIME_IS_VALID (pad->first_ts) &&
pad->first_ts != 0)) {
- buf = gst_buffer_make_metadata_writable (buf);
+ buf = gst_buffer_make_writable (buf);
check_and_subtract_ts (qtmux, &GST_BUFFER_TIMESTAMP (buf), pad->first_ts);
}
/* when we obtain the first_ts we subtract from all stored buffers we have,
/* for computing the avg bitrate */
if (G_LIKELY (last_buf)) {
- pad->total_bytes += GST_BUFFER_SIZE (last_buf);
+ pad->total_bytes += gst_buffer_get_size (last_buf);
pad->total_duration += duration;
}
buffer (= chunk)), but can also be fixed-packet-size codecs like ADPCM
*/
sample_size = pad->sample_size;
- if (GST_BUFFER_SIZE (last_buf) % sample_size != 0)
+ if (gst_buffer_get_size (last_buf) % sample_size != 0)
goto fragmented_sample;
/* note: qt raw audio storage warps it implicitly into a timewise
* perfect stream, discarding buffer times */
nsamples = gst_util_uint64_scale_round (GST_BUFFER_DURATION (last_buf),
atom_trak_get_timescale (pad->trak), GST_SECOND);
} else {
- nsamples = GST_BUFFER_SIZE (last_buf) / sample_size;
+ nsamples = gst_buffer_get_size (last_buf) / sample_size;
}
duration = GST_BUFFER_DURATION (last_buf) / nsamples;
pad->last_dts += duration * nsamples;
} else {
nsamples = 1;
- sample_size = GST_BUFFER_SIZE (last_buf);
+ sample_size = gst_buffer_get_size (last_buf);
if (pad->have_dts) {
gint64 scaled_dts;
pad->last_dts = GST_BUFFER_OFFSET_END (last_buf);
}
if (G_UNLIKELY (qtmux->state == GST_QT_MUX_STATE_EOS))
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
best_pad = (GstQTPad *) cdata;
if (ret == GST_FLOW_OK) {
GST_DEBUG_OBJECT (qtmux, "Pushing eos");
gst_pad_push_event (qtmux->srcpad, gst_event_new_eos ());
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
} else {
GST_WARNING_OBJECT (qtmux, "Failed to stop file: %s",
gst_flow_get_name (ret));
* the old caps are a subset of the new one (this means upstream
* added more info to the caps, as both should be 'fixed' caps) */
if (qtpad->fourcc) {
- GstCaps *current_caps = NULL;
- gboolean is_subset;
- g_object_get (pad, "caps", ¤t_caps, NULL);
+ GstCaps *current_caps;
+
+ current_caps = gst_pad_get_current_caps (pad);
g_assert (caps != NULL);
- is_subset = gst_qtmux_caps_is_subset_full (qtmux, current_caps, caps);
- gst_caps_unref (current_caps);
- if (!is_subset) {
+ if (!gst_qtmux_caps_is_subset_full (qtmux, current_caps, caps)) {
+ gst_caps_unref (current_caps);
goto refuse_renegotiation;
}
GST_DEBUG_OBJECT (qtmux,
"pad %s accepted renegotiation to %" GST_PTR_FORMAT " from %"
- GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, GST_PAD_CAPS (pad));
+ GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, current_caps);
+ gst_caps_unref (current_caps);
}
GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
"assuming 'raw'");
}
- if (!codec_data || GST_BUFFER_SIZE (codec_data) < 2)
+ if (!codec_data || gst_buffer_get_size ((GstBuffer *) codec_data) < 2)
GST_WARNING_OBJECT (qtmux, "no (valid) codec_data for AAC audio");
else {
- guint8 profile = GST_READ_UINT8 (GST_BUFFER_DATA (codec_data));
+ guint8 profile;
+ gst_buffer_extract ((GstBuffer *) codec_data, 0, &profile, 1);
/* warn if not Low Complexity profile */
profile >>= 3;
if (profile != 2)
entry.samples_per_packet = 320;
entry.bytes_per_sample = 2;
ext_atom = build_amr_extension ();
- } else if (strcmp (mimetype, "audio/x-raw-int") == 0) {
- gint width;
- gint depth;
- gint endianness;
- gboolean sign;
-
- if (!gst_structure_get_int (structure, "width", &width) ||
- !gst_structure_get_int (structure, "depth", &depth) ||
- !gst_structure_get_boolean (structure, "signed", &sign)) {
- GST_DEBUG_OBJECT (qtmux, "broken caps, width/depth/signed field missing");
- goto refuse_caps;
- }
+ } else if (strcmp (mimetype, "audio/x-raw") == 0) {
+ GstAudioInfo info;
- if (depth <= 8) {
- endianness = G_BYTE_ORDER;
- } else if (!gst_structure_get_int (structure, "endianness", &endianness)) {
- GST_DEBUG_OBJECT (qtmux, "broken caps, endianness field missing");
+ gst_audio_info_init (&info);
+ if (!gst_audio_info_from_caps (&info, caps))
goto refuse_caps;
- }
/* spec has no place for a distinction in these */
- if (width != depth) {
+ if (info.finfo->width != info.finfo->depth) {
GST_DEBUG_OBJECT (qtmux, "width must be same as depth!");
goto refuse_caps;
}
- if (sign) {
- if (endianness == G_LITTLE_ENDIAN)
+ if ((info.finfo->flags & GST_AUDIO_FORMAT_FLAG_SIGNED)) {
+ if (info.finfo->endianness == G_LITTLE_ENDIAN)
entry.fourcc = FOURCC_sowt;
- else if (endianness == G_BIG_ENDIAN)
+ else if (info.finfo->endianness == G_BIG_ENDIAN)
entry.fourcc = FOURCC_twos;
/* maximum backward compatibility; only new version for > 16 bit */
- if (depth <= 16)
+ if (info.finfo->depth <= 16)
entry.version = 0;
/* not compressed in any case */
entry.compression_id = 0;
/* QT spec says: max at 16 bit even if sample size were actually larger,
* however, most players (e.g. QuickTime!) seem to disagree, so ... */
- entry.sample_size = depth;
- entry.bytes_per_sample = depth / 8;
+ entry.sample_size = info.finfo->depth;
+ entry.bytes_per_sample = info.finfo->depth / 8;
entry.samples_per_packet = 1;
- entry.bytes_per_packet = depth / 8;
- entry.bytes_per_frame = entry.bytes_per_packet * channels;
+ entry.bytes_per_packet = info.finfo->depth / 8;
+ entry.bytes_per_frame = entry.bytes_per_packet * info.channels;
} else {
- if (width == 8 && depth == 8) {
+ if (info.finfo->width == 8 && info.finfo->depth == 8) {
/* fall back to old 8-bit version */
entry.fourcc = FOURCC_raw_;
entry.version = 0;
goto refuse_caps;
}
}
- constant_size = (depth / 8) * channels;
+ constant_size = (info.finfo->depth / 8) * info.channels;
} else if (strcmp (mimetype, "audio/x-alaw") == 0) {
entry.fourcc = FOURCC_alaw;
entry.samples_per_packet = 1023;
} else if (strcmp (mimetype, "audio/x-alac") == 0) {
GstBuffer *codec_config;
gint len;
+ GstMapInfo map;
entry.fourcc = FOURCC_alac;
+ gst_buffer_map ((GstBuffer *) codec_data, &map, GST_MAP_READ);
/* let's check if codec data already comes with 'alac' atom prefix */
- if (!codec_data || (len = GST_BUFFER_SIZE (codec_data)) < 28) {
+ if (!codec_data || (len = map.size) < 28) {
GST_DEBUG_OBJECT (qtmux, "broken caps, codec data missing");
+ gst_buffer_unmap ((GstBuffer *) codec_data, &map);
goto refuse_caps;
}
- if (GST_READ_UINT32_LE (GST_BUFFER_DATA (codec_data) + 4) == FOURCC_alac) {
+ if (GST_READ_UINT32_LE (map.data + 4) == FOURCC_alac) {
len -= 8;
- codec_config = gst_buffer_create_sub ((GstBuffer *) codec_data, 8, len);
+ codec_config =
+ gst_buffer_copy_region ((GstBuffer *) codec_data, 0, 8, len);
} else {
codec_config = gst_buffer_ref ((GstBuffer *) codec_data);
}
+ gst_buffer_unmap ((GstBuffer *) codec_data, &map);
if (len != 28) {
/* does not look good, but perhaps some trailing unneeded stuff */
GST_WARNING_OBJECT (qtmux, "unexpected codec-data size, possibly broken");
else
ext_atom = build_codec_data_extension (FOURCC_alac, codec_config);
/* set some more info */
+ gst_buffer_map (codec_config, &map, GST_MAP_READ);
entry.bytes_per_sample = 2;
- entry.samples_per_packet =
- GST_READ_UINT32_BE (GST_BUFFER_DATA (codec_config) + 4);
+ entry.samples_per_packet = GST_READ_UINT32_BE (map.data + 4);
+ gst_buffer_unmap (codec_config, &map);
gst_buffer_unref (codec_config);
}
* the old caps are a subset of the new one (this means upstream
* added more info to the caps, as both should be 'fixed' caps) */
if (qtpad->fourcc) {
- GstCaps *current_caps = NULL;
- gboolean is_subset;
- g_object_get (pad, "caps", ¤t_caps, NULL);
+ GstCaps *current_caps;
+
+ current_caps = gst_pad_get_current_caps (pad);
g_assert (caps != NULL);
- is_subset = gst_qtmux_caps_is_subset_full (qtmux, current_caps, caps);
- gst_caps_unref (current_caps);
- if (!is_subset) {
+ if (!gst_qtmux_caps_is_subset_full (qtmux, current_caps, caps)) {
+ gst_caps_unref (current_caps);
goto refuse_renegotiation;
}
GST_DEBUG_OBJECT (qtmux,
"pad %s accepted renegotiation to %" GST_PTR_FORMAT " from %"
- GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, GST_PAD_CAPS (pad));
+ GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, current_caps);
+ gst_caps_unref (current_caps);
}
GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
sync = TRUE;
/* now map onto a fourcc, and some extra properties */
- if (strcmp (mimetype, "video/x-raw-rgb") == 0) {
- gint bpp;
+ if (strcmp (mimetype, "video/x-raw") == 0) {
+ const gchar *format;
+ GstVideoFormat fmt;
+ const GstVideoFormatInfo *vinfo;
- entry.fourcc = FOURCC_raw_;
- gst_structure_get_int (structure, "bpp", &bpp);
- entry.depth = bpp;
- sync = FALSE;
- } else if (strcmp (mimetype, "video/x-raw-yuv") == 0) {
- guint32 format = 0;
+ format = gst_structure_get_string (structure, "format");
+ fmt = gst_video_format_from_string (format);
+ vinfo = gst_video_format_get_info (fmt);
- sync = FALSE;
- gst_structure_get_fourcc (structure, "format", &format);
- switch (format) {
- case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
+ switch (fmt) {
+ case GST_VIDEO_FORMAT_UYVY:
if (depth == -1)
depth = 24;
entry.fourcc = FOURCC_2vuy;
entry.depth = depth;
+ sync = FALSE;
+ break;
+ default:
+ if (GST_VIDEO_FORMAT_INFO_FLAGS (vinfo) & GST_VIDEO_FORMAT_FLAG_RGB) {
+ entry.fourcc = FOURCC_raw_;
+ entry.depth = GST_VIDEO_FORMAT_INFO_PSTRIDE (vinfo, 0) * 8;
+ sync = FALSE;
+ }
break;
}
} else if (strcmp (mimetype, "video/x-h263") == 0) {
sync = FALSE;
} else if (strcmp (mimetype, "image/x-j2c") == 0 ||
strcmp (mimetype, "image/x-jpc") == 0) {
- guint32 fourcc;
+ const gchar *colorspace;
const GValue *cmap_array;
const GValue *cdef_array;
gint ncomp = 0;
ext_atom = NULL;
entry.fourcc = FOURCC_mjp2;
sync = FALSE;
- if (gst_structure_get_fourcc (structure, "fourcc", &fourcc) &&
+
+ colorspace = gst_structure_get_string (structure, "colorspace");
+ if (colorspace &&
(ext_atom =
- build_jp2h_extension (qtpad->trak, width, height, fourcc, ncomp,
+ build_jp2h_extension (qtpad->trak, width, height, colorspace, ncomp,
cmap_array, cdef_array)) != NULL) {
ext_atom_list = g_list_append (ext_atom_list, ext_atom);
} else if (strcmp (mimetype, "video/x-qt-part") == 0) {
guint32 fourcc;
- gst_structure_get_fourcc (structure, "format", &fourcc);
+ gst_structure_get_uint (structure, "format", &fourcc);
entry.fourcc = fourcc;
qtpad->have_dts = TRUE;
} else if (strcmp (mimetype, "video/x-mp4-part") == 0) {
guint32 fourcc;
- gst_structure_get_fourcc (structure, "format", &fourcc);
+ gst_structure_get_uint (structure, "format", &fourcc);
entry.fourcc = fourcc;
qtpad->have_dts = TRUE;
}
refuse_renegotiation:
{
GST_WARNING_OBJECT (qtmux,
- "pad %s refused renegotiation to %" GST_PTR_FORMAT " from %"
- GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, GST_PAD_CAPS (pad));
+ "pad %s refused renegotiation to %" GST_PTR_FORMAT, GST_PAD_NAME (pad),
+ caps);
gst_object_unref (qtmux);
return FALSE;
}
GstQTMux *qtmux;
guint32 avg_bitrate = 0, max_bitrate = 0;
GstPad *pad = data->pad;
+ gboolean ret = FALSE;
qtmux = GST_QT_MUX_CAST (user_data);
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+ GstQTPad *collect_pad;
+
+ gst_event_parse_caps (event, &caps);
+
+ /* find stream data */
+ collect_pad = (GstQTPad *) gst_pad_get_element_private (pad);
+ g_assert (collect_pad);
+ g_assert (collect_pad->set_caps);
+
+ ret = collect_pad->set_caps (pad, caps);
+ gst_event_unref (event);
+ break;
+ }
case GST_EVENT_TAG:{
GstTagList *list;
GstTagSetter *setter = GST_TAG_SETTER (qtmux);
qtpad->max_bitrate = max_bitrate;
}
+ gst_event_unref (event);
+ ret = TRUE;
break;
}
default:
+ ret = gst_pad_event_default (data->pad, GST_OBJECT (qtmux), event);
+ break;
+ case GST_EVENT_EOS:
+ case GST_EVENT_SEGMENT:
+ gst_event_unref (event);
+ ret = TRUE;
break;
}
- /* now GstCollectPads2 can take care of the rest, e.g. EOS */
- return FALSE;
+ return ret;
}
static void
static GstPad *
gst_qt_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * req_name)
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
GstQTMux *qtmux = GST_QT_MUX_CAST (element);
GstPad *newpad;
gboolean audio;
gchar *name;
+ gint pad_id;
if (templ->direction != GST_PAD_SINK)
goto wrong_direction;
if (qtmux->state > GST_QT_MUX_STATE_STARTED)
goto too_late;
- if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) {
+ if (templ == gst_element_class_get_pad_template (klass, "audio_%u")) {
audio = TRUE;
- name = g_strdup_printf ("audio_%02d", qtmux->audio_pads++);
- } else if (templ == gst_element_class_get_pad_template (klass, "video_%d")) {
+ if (req_name != NULL && sscanf (req_name, "audio_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("audio_%u", qtmux->audio_pads++);
+ }
+ } else if (templ == gst_element_class_get_pad_template (klass, "video_%u")) {
audio = FALSE;
- name = g_strdup_printf ("video_%02d", qtmux->video_pads++);
+ if (req_name != NULL && sscanf (req_name, "video_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("video_%u", qtmux->video_pads++);
+ }
} else
goto wrong_template;
/* set up pad functions */
if (audio)
- gst_pad_set_setcaps_function (newpad,
- GST_DEBUG_FUNCPTR (gst_qt_mux_audio_sink_set_caps));
+ collect_pad->set_caps = GST_DEBUG_FUNCPTR (gst_qt_mux_audio_sink_set_caps);
else
- gst_pad_set_setcaps_function (newpad,
- GST_DEBUG_FUNCPTR (gst_qt_mux_video_sink_set_caps));
+ collect_pad->set_caps = GST_DEBUG_FUNCPTR (gst_qt_mux_video_sink_set_caps);
gst_pad_set_active (newpad, TRUE);
gst_element_add_pad (element, newpad);
/* if nothing is set, it won't be called */
GstQTPadPrepareBufferFunc prepare_buf_func;
+ gboolean (*set_caps) (GstPad * pad, GstCaps * caps);
};
typedef enum _GstQTMuxState
"rate = (int) [ 1, " G_STRINGIFY (r) " ]"
#define PCM_CAPS \
- "audio/x-raw-int, " \
- "width = (int) 8, " \
- "depth = (int) 8, " \
- COMMON_AUDIO_CAPS (2, MAX) ", " \
- "signed = (boolean) { true, false }; " \
- "audio/x-raw-int, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "endianness = (int) { BIG_ENDIAN, LITTLE_ENDIAN }, " \
- COMMON_AUDIO_CAPS (2, MAX) ", " \
- "signed = (boolean) true " \
+ "audio/x-raw, " \
+ "format = (string) { S8, U8 }, " \
+ "layout = (string) interleaved, " \
+ COMMON_AUDIO_CAPS (2, MAX) "; " \
+ "audio/x-raw, " \
+ "format = (string) { S16LE, S16BE }, " \
+ "layout = (string) interleaved, " \
+ COMMON_AUDIO_CAPS (2, MAX)
#define PCM_CAPS_FULL \
PCM_CAPS "; " \
- "audio/x-raw-int, " \
- "width = (int) 24, " \
- "depth = (int) 24, " \
- "endianness = (int) { BIG_ENDIAN, LITTLE_ENDIAN }, " \
- COMMON_AUDIO_CAPS (2, MAX) ", " \
- "signed = (boolean) true; " \
- "audio/x-raw-int, " \
- "width = (int) 32, " \
- "depth = (int) 32, " \
- "endianness = (int) { BIG_ENDIAN, LITTLE_ENDIAN }, " \
- COMMON_AUDIO_CAPS (2, MAX) ", " \
- "signed = (boolean) true "
+ "audio/x-raw, " \
+ "format = (string) { S24LE, S24BE }, " \
+ "layout = (string) interleaved, " \
+ COMMON_AUDIO_CAPS (2, MAX) "; " \
+ "audio/x-raw, " \
+ "format = (string) { S32LE, S32BE }, " \
+ "layout = (string) interleaved, " \
+ COMMON_AUDIO_CAPS (2, MAX)
#define MP3_CAPS \
"audio/mpeg, " \
"GstQTMux",
GST_STATIC_CAPS ("video/quicktime, variant = (string) apple; "
"video/quicktime"),
- GST_STATIC_CAPS ("video/x-raw-rgb, "
- COMMON_VIDEO_CAPS "; "
- "video/x-raw-yuv, "
- "format = (fourcc) UYVY, "
+ GST_STATIC_CAPS ("video/x-raw, "
+ "format = (string) { RGB, UYVY }, "
COMMON_VIDEO_CAPS "; "
MPEG4V_CAPS "; "
H263_CAPS "; "
break;
}
case GST_QT_MUX_FORMAT_MJ2:
+ {
major = FOURCC_mjp2;
comp = mjp2_brands;
version = 0;
prefix = gst_buffer_new_and_alloc (sizeof (mjp2_prefix));
- memcpy (GST_BUFFER_DATA (prefix), mjp2_prefix, GST_BUFFER_SIZE (prefix));
+ gst_buffer_fill (prefix, 0, mjp2_prefix, sizeof (mjp2_prefix));
break;
+ }
default:
g_assert_not_reached ();
break;
"encoding-name = (string) { \"X-QT\", \"X-QUICKTIME\" }")
);
-GST_BOILERPLATE (GstRtpXQTDepay, gst_rtp_xqt_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_xqt_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpXQTDepay, gst_rtp_xqt_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_xqt_depay_finalize (GObject * object);
-static gboolean gst_rtp_xqt_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_xqt_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_xqt_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_xqt_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static GstStateChangeReturn gst_rtp_xqt_depay_change_state (GstElement *
element, GstStateChange transition);
-static void
-gst_rtp_xqt_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_xqt_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_xqt_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP packet depayloader",
- "Codec/Depayloader/Network",
- "Extracts Quicktime audio/video from RTP packets",
- "Wim Taymans <wim@fluendo.com>");
-}
static void
gst_rtp_xqt_depay_class_init (GstRtpXQTDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gstelement_class->change_state = gst_rtp_xqt_depay_change_state;
- gstbasertpdepayload_class->set_caps = gst_rtp_xqt_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_xqt_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_xqt_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_xqt_depay_process;
GST_DEBUG_CATEGORY_INIT (rtpxqtdepay_debug, "rtpxqtdepay", 0,
"QT Media RTP Depayloader");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_xqt_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_xqt_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP packet depayloader", "Codec/Depayloader/Network",
+ "Extracts Quicktime audio/video from RTP packets",
+ "Wim Taymans <wim@fluendo.com>");
}
static void
-gst_rtp_xqt_depay_init (GstRtpXQTDepay * rtpxqtdepay,
- GstRtpXQTDepayClass * klass)
+gst_rtp_xqt_depay_init (GstRtpXQTDepay * rtpxqtdepay)
{
rtpxqtdepay->adapter = gst_adapter_new ();
}
size = len - 8;
buf = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buf), data + 8, size);
+ gst_buffer_fill (buf, 0, data + 8, size);
caps = gst_caps_new_simple ("video/x-h264",
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
- gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD (rtpxqtdepay)->srcpad, caps);
+ gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD (rtpxqtdepay)->srcpad, caps);
gst_caps_unref (caps);
break;
}
}
static gboolean
-gst_rtp_xqt_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_xqt_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
gint clock_rate = 90000; /* default */
}
static GstBuffer *
-gst_rtp_xqt_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_xqt_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpXQTDepay *rtpxqtdepay;
- GstBuffer *outbuf;
+ GstBuffer *outbuf = NULL;
gboolean m;
+ GstRTPBuffer rtp = { NULL };
rtpxqtdepay = GST_RTP_XQT_DEPAY (depayload);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
if (!gst_rtp_buffer_validate (buf))
goto bad_packet;
GST_DEBUG_OBJECT (rtpxqtdepay, "we need resync");
}
- m = gst_rtp_buffer_get_marker (buf);
+ m = gst_rtp_buffer_get_marker (&rtp);
GST_LOG_OBJECT (rtpxqtdepay, "marker: %d", m);
{
guint8 ver, pck;
gboolean s, q, l, d;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
{
/* multiple samples per packet. */
outbuf = gst_buffer_new_and_alloc (payload_len);
- memcpy (GST_BUFFER_DATA (outbuf), payload, payload_len);
- return outbuf;
+ gst_buffer_fill (outbuf, 0, payload, payload_len);
+
+ goto done;
}
case 2:
{
slen = payload_len;
outbuf = gst_buffer_new_and_alloc (slen);
- memcpy (GST_BUFFER_DATA (outbuf), payload, slen);
+ gst_buffer_fill (outbuf, 0, payload, slen);
if (!s)
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
- gst_base_rtp_depayload_push (depayload, outbuf);
+ gst_rtp_base_depayload_push (depayload, outbuf);
/* aligned on 32 bit boundary */
slen = GST_ROUND_UP_4 (slen);
{
/* one sample per packet, use adapter to combine based on marker bit. */
outbuf = gst_buffer_new_and_alloc (payload_len);
- memcpy (GST_BUFFER_DATA (outbuf), payload, payload_len);
+ gst_buffer_fill (outbuf, 0, payload, payload_len);
gst_adapter_push (rtpxqtdepay->adapter, outbuf);
GST_DEBUG_OBJECT (rtpxqtdepay,
"gst_rtp_xqt_depay_chain: pushing buffer of size %u", avail);
- return outbuf;
+ goto done;
}
}
}
done:
- return NULL;
+ gst_rtp_buffer_unmap (&rtp);
+ return outbuf;
bad_packet:
{
GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
("Packet did not validate."), (NULL));
- return NULL;
+ goto done;
}
need_resync:
{
GST_DEBUG_OBJECT (rtpxqtdepay, "waiting for marker");
- return NULL;
+ goto done;
}
wrong_version:
{
GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
("Unknown payload version."), (NULL));
- return NULL;
+ goto done;
}
pck_reserved:
{
GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
("PCK reserved 0."), (NULL));
- return NULL;
+ goto done;
}
wrong_length:
{
GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
("Wrong payload length."), (NULL));
- return NULL;
+ goto done;
}
unknown_format:
{
GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
("Unknown payload format."), (NULL));
- return NULL;
+ goto done;
}
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpXQTDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstAdapter *adapter;
struct _GstRtpXQTDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_xqt_depay_get_type (void);
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch filesrc location=test.mov ! qtdemux name=demux demux.audio_00 ! decodebin ! audioconvert ! audioresample ! autoaudiosink demux.video_00 ! queue ! decodebin ! ffmpegcolorspace ! videoscale ! autovideosink
+ * gst-launch filesrc location=test.mov ! qtdemux name=demux demux.audio_0 ! decodebin ! audioconvert ! audioresample ! autoaudiosink demux.video_0 ! queue ! decodebin ! ffmpegcolorspace ! videoscale ! autovideosink
* ]| Play (parse and decode) a .mov file and try to output it to
* an automatically detected soundcard and videosink. If the MOV file contains
* compressed audio or video data, this will only work if you have the
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include "gst/gst-i18n-plugin.h"
#include <glib/gprintf.h>
#include <gst/tag/tag.h>
+#include <gst/audio/audio.h>
#include "qtatomparser.h"
#include "qtdemux_types.h"
);
static GstStaticPadTemplate gst_qtdemux_videosrc_template =
-GST_STATIC_PAD_TEMPLATE ("video_%02d",
+GST_STATIC_PAD_TEMPLATE ("video_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
static GstStaticPadTemplate gst_qtdemux_audiosrc_template =
-GST_STATIC_PAD_TEMPLATE ("audio_%02d",
+GST_STATIC_PAD_TEMPLATE ("audio_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
static GstStaticPadTemplate gst_qtdemux_subsrc_template =
-GST_STATIC_PAD_TEMPLATE ("subtitle_%02d",
+GST_STATIC_PAD_TEMPLATE ("subtitle_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
-GST_BOILERPLATE (GstQTDemux, gst_qtdemux, GstQTDemux, GST_TYPE_ELEMENT);
+#define gst_qtdemux_parent_class parent_class
+G_DEFINE_TYPE (GstQTDemux, gst_qtdemux, GST_TYPE_ELEMENT);
static void gst_qtdemux_dispose (GObject * object);
gst_qtdemux_find_index_for_given_media_offset_linear (GstQTDemux * qtdemux,
QtDemuxStream * str, gint64 media_offset);
+#if 0
static void gst_qtdemux_set_index (GstElement * element, GstIndex * index);
static GstIndex *gst_qtdemux_get_index (GstElement * element);
+#endif
static GstStateChangeReturn gst_qtdemux_change_state (GstElement * element,
GstStateChange transition);
-static gboolean qtdemux_sink_activate (GstPad * sinkpad);
-static gboolean qtdemux_sink_activate_pull (GstPad * sinkpad, gboolean active);
-static gboolean qtdemux_sink_activate_push (GstPad * sinkpad, gboolean active);
+static gboolean qtdemux_sink_activate (GstPad * sinkpad, GstObject * parent);
+static gboolean qtdemux_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
static void gst_qtdemux_loop (GstPad * pad);
-static GstFlowReturn gst_qtdemux_chain (GstPad * sinkpad, GstBuffer * inbuf);
-static gboolean gst_qtdemux_handle_sink_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_qtdemux_chain (GstPad * sinkpad, GstObject * parent,
+ GstBuffer * inbuf);
+static gboolean gst_qtdemux_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static gboolean qtdemux_parse_moov (GstQTDemux * qtdemux,
const guint8 * buffer, guint length);
static GstFlowReturn qtdemux_expose_streams (GstQTDemux * qtdemux);
static void
-gst_qtdemux_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_qtdemux_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_qtdemux_videosrc_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_qtdemux_audiosrc_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_qtdemux_subsrc_template);
- gst_element_class_set_details_simple (element_class, "QuickTime demuxer",
- "Codec/Demuxer",
- "Demultiplex a QuickTime file into audio and video streams",
- "David Schleef <ds@schleef.org>, Wim Taymans <wim@fluendo.com>");
-
- GST_DEBUG_CATEGORY_INIT (qtdemux_debug, "qtdemux", 0, "qtdemux plugin");
-}
-
-static void
gst_qtdemux_class_init (GstQTDemuxClass * klass)
{
GObjectClass *gobject_class;
gobject_class->dispose = gst_qtdemux_dispose;
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_qtdemux_change_state);
-
+#if 0
gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_qtdemux_set_index);
gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_qtdemux_get_index);
+#endif
gst_tag_register_musicbrainz_tags ();
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_qtdemux_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_qtdemux_videosrc_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_qtdemux_audiosrc_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_qtdemux_subsrc_template));
+ gst_element_class_set_details_simple (gstelement_class, "QuickTime demuxer",
+ "Codec/Demuxer",
+ "Demultiplex a QuickTime file into audio and video streams",
+ "David Schleef <ds@schleef.org>, Wim Taymans <wim@fluendo.com>");
+
+ GST_DEBUG_CATEGORY_INIT (qtdemux_debug, "qtdemux", 0, "qtdemux plugin");
+
}
static void
-gst_qtdemux_init (GstQTDemux * qtdemux, GstQTDemuxClass * klass)
+gst_qtdemux_init (GstQTDemux * qtdemux)
{
qtdemux->sinkpad =
gst_pad_new_from_static_template (&gst_qtdemux_sink_template, "sink");
gst_pad_set_activate_function (qtdemux->sinkpad, qtdemux_sink_activate);
- gst_pad_set_activatepull_function (qtdemux->sinkpad,
- qtdemux_sink_activate_pull);
- gst_pad_set_activatepush_function (qtdemux->sinkpad,
- qtdemux_sink_activate_push);
+ gst_pad_set_activatemode_function (qtdemux->sinkpad,
+ qtdemux_sink_activate_mode);
gst_pad_set_chain_function (qtdemux->sinkpad, gst_qtdemux_chain);
gst_pad_set_event_function (qtdemux->sinkpad, gst_qtdemux_handle_sink_event);
gst_element_add_pad (GST_ELEMENT_CAST (qtdemux), qtdemux->sinkpad);
qtdemux->mdatoffset = GST_CLOCK_TIME_NONE;
qtdemux->mdatbuffer = NULL;
gst_segment_init (&qtdemux->segment, GST_FORMAT_TIME);
+
+ GST_OBJECT_FLAG_SET (qtdemux, GST_ELEMENT_FLAG_INDEXABLE);
}
static void
}
}
+static void
+_gst_buffer_copy_into_mem (GstBuffer * dest, gsize offset, const guint8 * src,
+ gsize size)
+{
+ gsize bsize;
+
+ g_return_if_fail (gst_buffer_is_writable (dest));
+
+ bsize = gst_buffer_get_size (dest);
+ g_return_if_fail (bsize >= offset + size);
+
+ gst_buffer_fill (dest, offset, src, size);
+}
+
+static GstBuffer *
+_gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
+{
+ GstBuffer *buf;
+
+ buf = gst_buffer_new ();
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (free_func ? 0 : GST_MEMORY_FLAG_READONLY,
+ mem, free_func, size, 0, size));
+
+ return buf;
+}
+
static GstFlowReturn
gst_qtdemux_pull_atom (GstQTDemux * qtdemux, guint64 offset, guint64 size,
GstBuffer ** buf)
{
GstFlowReturn flow;
+ GstMapInfo map;
+ gsize bsize;
if (G_UNLIKELY (size == 0)) {
GstFlowReturn ret;
if (ret != GST_FLOW_OK)
return ret;
- size = QT_UINT32 (GST_BUFFER_DATA (tmp));
+ gst_buffer_map (tmp, &map, GST_MAP_READ);
+ size = QT_UINT32 (map.data);
GST_DEBUG_OBJECT (qtdemux, "size 0x%08" G_GINT64_MODIFIER "x", size);
+ gst_buffer_unmap (tmp, &map);
gst_buffer_unref (tmp);
}
* so never mind the rest (e.g. tags) (that much) */
GST_WARNING_OBJECT (qtdemux, "atom has bogus size %" G_GUINT64_FORMAT,
size);
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
} else {
GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
(_("This file is invalid and cannot be played.")),
if (G_UNLIKELY (flow != GST_FLOW_OK))
return flow;
+ bsize = gst_buffer_get_size (*buf);
/* Catch short reads - we don't want any partial atoms */
- if (G_UNLIKELY (GST_BUFFER_SIZE (*buf) < size)) {
- GST_WARNING_OBJECT (qtdemux, "short read: %u < %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (*buf), size);
+ if (G_UNLIKELY (bsize < size)) {
+ GST_WARNING_OBJECT (qtdemux,
+ "short read: %" G_GSIZE_FORMAT " < %" G_GUINT64_FORMAT, bsize, size);
gst_buffer_unref (*buf);
*buf = NULL;
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
return flow;
}
#endif
-static const GstQueryType *
-gst_qtdemux_get_src_query_types (GstPad * pad)
-{
- static const GstQueryType src_types[] = {
- GST_QUERY_POSITION,
- GST_QUERY_DURATION,
- GST_QUERY_CONVERT,
- GST_QUERY_FORMATS,
- GST_QUERY_SEEKING,
- 0
- };
-
- return src_types;
-}
-
static gboolean
gst_qtdemux_get_duration (GstQTDemux * qtdemux, gint64 * duration)
{
}
static gboolean
-gst_qtdemux_handle_src_query (GstPad * pad, GstQuery * query)
+gst_qtdemux_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
gboolean res = FALSE;
- GstQTDemux *qtdemux = GST_QTDEMUX (gst_pad_get_parent (pad));
+ GstQTDemux *qtdemux = GST_QTDEMUX (parent);
GST_LOG_OBJECT (pad, "%s query", GST_QUERY_TYPE_NAME (query));
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_POSITION:
- if (GST_CLOCK_TIME_IS_VALID (qtdemux->segment.last_stop)) {
+ if (GST_CLOCK_TIME_IS_VALID (qtdemux->segment.position)) {
gst_query_set_position (query, GST_FORMAT_TIME,
- qtdemux->segment.last_stop);
+ qtdemux->segment.position);
res = TRUE;
}
break;
break;
}
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
- gst_object_unref (qtdemux);
-
return res;
}
GstSeekType cur_type, gint64 * cur, GstSeekType stop_type, gint64 * stop)
{
gboolean res;
- GstFormat fmt;
g_return_val_if_fail (format != NULL, FALSE);
g_return_val_if_fail (cur != NULL, FALSE);
if (*format == GST_FORMAT_TIME)
return TRUE;
- fmt = GST_FORMAT_TIME;
res = TRUE;
if (cur_type != GST_SEEK_TYPE_NONE)
- res = gst_pad_query_convert (pad, *format, *cur, &fmt, cur);
+ res = gst_pad_query_convert (pad, *format, *cur, GST_FORMAT_TIME, cur);
if (res && stop_type != GST_SEEK_TYPE_NONE)
- res = gst_pad_query_convert (pad, *format, *stop, &fmt, stop);
+ res = gst_pad_query_convert (pad, *format, *stop, GST_FORMAT_TIME, stop);
if (res)
*format = GST_FORMAT_TIME;
gint64 desired_offset;
gint n;
- desired_offset = segment->last_stop;
+ desired_offset = segment->position;
GST_DEBUG_OBJECT (qtdemux, "seeking to %" GST_TIME_FORMAT,
GST_TIME_ARGS (desired_offset));
stream->segment_index = -1;
stream->last_ret = GST_FLOW_OK;
stream->sent_eos = FALSE;
+
+ if (segment->flags & GST_SEEK_FLAG_FLUSH)
+ gst_segment_init (&stream->segment, GST_FORMAT_TIME);
}
- segment->last_stop = desired_offset;
+ segment->position = desired_offset;
segment->time = desired_offset;
/* we stop at the end */
if (event) {
/* configure the segment with the seek variables */
GST_DEBUG_OBJECT (qtdemux, "configuring seek");
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
}
/* prepare for streaming again */
if (flush) {
- gst_pad_push_event (qtdemux->sinkpad, gst_event_new_flush_stop ());
- gst_qtdemux_push_event (qtdemux, gst_event_new_flush_stop ());
- } else if (qtdemux->segment_running) {
- /* we are running the current segment and doing a non-flushing seek,
- * close the segment first based on the last_stop. */
- GST_DEBUG_OBJECT (qtdemux, "closing running segment %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, qtdemux->segment.start,
- qtdemux->segment.last_stop);
-
- if (qtdemux->segment.rate >= 0) {
- /* FIXME, rate is the product of the global rate and the (quicktime)
- * segment rate. */
- qtdemux->pending_newsegment = gst_event_new_new_segment (TRUE,
- qtdemux->segment.rate, qtdemux->segment.format,
- qtdemux->segment.start, qtdemux->segment.last_stop,
- qtdemux->segment.time);
- } else { /* For Reverse Playback */
- guint64 stop;
-
- if ((stop = qtdemux->segment.stop) == -1)
- stop = qtdemux->segment.duration;
- /* for reverse playback, we played from stop to last_stop. */
- qtdemux->pending_newsegment = gst_event_new_new_segment (TRUE,
- qtdemux->segment.rate, qtdemux->segment.format,
- qtdemux->segment.last_stop, stop, qtdemux->segment.last_stop);
- }
+ gst_pad_push_event (qtdemux->sinkpad, gst_event_new_flush_stop (TRUE));
+ gst_qtdemux_push_event (qtdemux, gst_event_new_flush_stop (TRUE));
}
/* commit the new segment */
if (qtdemux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
gst_message_new_segment_start (GST_OBJECT_CAST (qtdemux),
- qtdemux->segment.format, qtdemux->segment.last_stop));
+ qtdemux->segment.format, qtdemux->segment.position));
}
- /* restart streaming, NEWSEGMENT will be sent from the streaming
- * thread. */
- qtdemux->segment_running = TRUE;
+ /* restart streaming, NEWSEGMENT will be sent from the streaming thread. */
for (i = 0; i < qtdemux->n_streams; i++)
qtdemux->streams[i]->last_ret = GST_FLOW_OK;
}
static gboolean
-gst_qtdemux_handle_src_event (GstPad * pad, GstEvent * event)
+gst_qtdemux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
gboolean res = TRUE;
- GstQTDemux *qtdemux = GST_QTDEMUX (gst_pad_get_parent (pad));
+ GstQTDemux *qtdemux = GST_QTDEMUX (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:
gst_event_unref (event);
break;
default:
- res = gst_pad_event_default (pad, event);
+ res = gst_pad_event_default (pad, parent, event);
break;
}
- gst_object_unref (qtdemux);
-
done:
return res;
}
static gboolean
-gst_qtdemux_handle_sink_event (GstPad * sinkpad, GstEvent * event)
+gst_qtdemux_handle_sink_event (GstPad * sinkpad, GstObject * parent,
+ GstEvent * event)
{
- GstQTDemux *demux = GST_QTDEMUX (GST_PAD_PARENT (sinkpad));
+ GstQTDemux *demux = GST_QTDEMUX (parent);
gboolean res;
GST_LOG_OBJECT (demux, "handling %s event", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time, offset = 0;
+ gint64 offset = 0;
QtDemuxStream *stream;
gint idx;
- gboolean update;
GstSegment segment;
/* some debug output */
- gst_segment_init (&segment, GST_FORMAT_UNDEFINED);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
- gst_segment_set_newsegment_full (&segment, update, rate, arate, format,
- start, stop, time);
- GST_DEBUG_OBJECT (demux,
- "received format %d newsegment %" GST_SEGMENT_FORMAT, format,
+ gst_event_copy_segment (event, &segment);
+ GST_DEBUG_OBJECT (demux, "received newsegment %" GST_SEGMENT_FORMAT,
&segment);
/* chain will send initial newsegment after pads have been added */
}
/* we only expect a BYTE segment, e.g. following a seek */
- if (format == GST_FORMAT_BYTES) {
- if (start > 0) {
+ if (segment.format == GST_FORMAT_BYTES) {
+ if (GST_CLOCK_TIME_IS_VALID (segment.start)) {
gint64 requested_seek_time;
guint64 seek_offset;
- offset = start;
+ offset = segment.start;
GST_OBJECT_LOCK (demux);
requested_seek_time = demux->requested_seek_time;
GST_OBJECT_UNLOCK (demux);
if (offset == seek_offset) {
- start = requested_seek_time;
+ segment.start = requested_seek_time;
} else {
- gst_qtdemux_find_sample (demux, start, TRUE, FALSE, NULL, NULL,
- &start);
- start = MAX (start, 0);
+ gst_qtdemux_find_sample (demux, segment.start, TRUE, FALSE, NULL,
+ NULL, (gint64 *) & segment.start);
+ if ((gint64) segment.start < 0)
+ segment.start = 0;
}
}
- if (stop > 0) {
- gst_qtdemux_find_sample (demux, stop, FALSE, FALSE, NULL, NULL,
- &stop);
+ if (GST_CLOCK_TIME_IS_VALID (segment.stop)) {
+ gst_qtdemux_find_sample (demux, segment.stop, FALSE, FALSE, NULL,
+ NULL, (gint64 *) & segment.stop);
/* keyframe seeking should already arrange for start >= stop,
* but make sure in other rare cases */
- stop = MAX (stop, start);
+ segment.stop = MAX (segment.stop, segment.start);
}
} else {
GST_DEBUG_OBJECT (demux, "unsupported segment format, ignoring");
}
/* accept upstream's notion of segment and distribute along */
- gst_segment_set_newsegment_full (&demux->segment, update, rate, arate,
- GST_FORMAT_TIME, start, stop, start);
- GST_DEBUG_OBJECT (demux, "Pushing newseg update %d, rate %g, "
- "applied rate %g, format %d, start %" GST_TIME_FORMAT ", "
- "stop %" GST_TIME_FORMAT, update, rate, arate, GST_FORMAT_TIME,
- GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+ segment.time = segment.start;
+ segment.duration = demux->segment.duration;
+ segment.base = gst_segment_to_running_time (&demux->segment,
+ GST_FORMAT_TIME, demux->segment.position);
- gst_qtdemux_push_event (demux,
- gst_event_new_new_segment_full (update, rate, arate, GST_FORMAT_TIME,
- start, stop, start));
+ gst_segment_copy_into (&segment, &demux->segment);
+ GST_DEBUG_OBJECT (demux, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);
+ gst_qtdemux_push_event (demux, gst_event_new_segment (&segment));
/* clear leftover in current segment, if any */
gst_adapter_clear (demux->adapter);
case GST_EVENT_FLUSH_STOP:
{
gint i;
+ GstClockTime dur;
/* clean up, force EOS if no more info follows */
gst_adapter_clear (demux->adapter);
demux->streams[i]->last_ret = GST_FLOW_OK;
demux->streams[i]->sent_eos = FALSE;
}
+ dur = demux->segment.duration;
+ gst_segment_init (&demux->segment, GST_FORMAT_TIME);
+ demux->segment.duration = dur;
break;
}
case GST_EVENT_EOS:
break;
}
- res = gst_pad_event_default (demux->sinkpad, event);
+ res = gst_pad_event_default (demux->sinkpad, parent, event);
drop:
return res;
}
+#if 0
static void
gst_qtdemux_set_index (GstElement * element, GstIndex * index)
{
return result;
}
+#endif
static void
gst_qtdemux_stbl_free (QtDemuxStream * stream)
if (qtdemux->tag_list)
gst_tag_list_free (qtdemux->tag_list);
qtdemux->tag_list = NULL;
+#if 0
if (qtdemux->element_index)
gst_object_unref (qtdemux->element_index);
qtdemux->element_index = NULL;
+#endif
gst_adapter_clear (qtdemux->adapter);
for (n = 0; n < qtdemux->n_streams; n++) {
gst_qtdemux_stream_free (qtdemux, qtdemux->streams[n]);
GST_DEBUG_OBJECT (qtdemux, "major brand: %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (qtdemux->major_brand));
buf = qtdemux->comp_brands = gst_buffer_new_and_alloc (length - 16);
- memcpy (GST_BUFFER_DATA (buf), buffer + 16, GST_BUFFER_SIZE (buf));
+ _gst_buffer_copy_into_mem (buf, 0, buffer + 16, length - 16);
}
}
GstBuffer *buf;
GstTagList *taglist;
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = (guint8 *) buffer + offset + 16;
- GST_BUFFER_SIZE (buf) = length - offset - 16;
-
+ buf = _gst_buffer_new_wrapped ((guint8 *) buffer + offset + 16,
+ length - offset - 16, NULL);
taglist = gst_tag_list_from_xmp_buffer (buf);
gst_buffer_unref (buf);
gint64 len;
GstFormat fmt = GST_FORMAT_BYTES;
- if (!gst_pad_query_peer_duration (qtdemux->sinkpad, &fmt, &len)) {
+ if (!gst_pad_peer_query_duration (qtdemux->sinkpad, &fmt, &len)) {
GST_DEBUG_OBJECT (qtdemux, "upstream size not available; "
"can not locate mfro");
goto exit;
GstBuffer *buf = NULL;
GstFlowReturn ret = GST_FLOW_OK;
guint64 cur_offset = qtdemux->offset;
+ GstMapInfo map;
ret = gst_pad_pull_range (qtdemux->sinkpad, cur_offset, 16, &buf);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto beach;
- if (G_LIKELY (GST_BUFFER_SIZE (buf) >= 8))
- extract_initial_length_and_fourcc (GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf), &length, &fourcc);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (G_LIKELY (map.size >= 8))
+ extract_initial_length_and_fourcc (map.data, map.size, &length, &fourcc);
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
/* maybe we already got most we needed, so only consider this eof */
(_("Invalid atom size.")),
("Header atom '%" GST_FOURCC_FORMAT "' has empty length",
GST_FOURCC_ARGS (fourcc)));
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto beach;
}
ret = gst_pad_pull_range (qtdemux->sinkpad, cur_offset, length, &moov);
if (ret != GST_FLOW_OK)
goto beach;
- if (length != GST_BUFFER_SIZE (moov)) {
+ gst_buffer_map (moov, &map, GST_MAP_READ);
+ if (length != map.size) {
/* Some files have a 'moov' atom at the end of the file which contains
* a terminal 'free' atom where the body of the atom is missing.
* Check for, and permit, this special case.
*/
- if (GST_BUFFER_SIZE (moov) >= 8) {
- guint8 *final_data = GST_BUFFER_DATA (moov) +
- (GST_BUFFER_SIZE (moov) - 8);
+ if (map.size >= 8) {
+ guint8 *final_data = map.data + (map.size - 8);
guint32 final_length = QT_UINT32 (final_data);
guint32 final_fourcc = QT_FOURCC (final_data + 4);
- if (final_fourcc == FOURCC_free &&
- GST_BUFFER_SIZE (moov) + final_length - 8 == length) {
+ gst_buffer_unmap (moov, &map);
+ if (final_fourcc == FOURCC_free
+ && map.size + final_length - 8 == length) {
/* Ok, we've found that special case. Allocate a new buffer with
* that free atom actually present. */
GstBuffer *newmoov = gst_buffer_new_and_alloc (length);
- gst_buffer_copy_metadata (newmoov, moov,
- GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
- GST_BUFFER_COPY_CAPS);
- memcpy (GST_BUFFER_DATA (newmoov), GST_BUFFER_DATA (moov),
- GST_BUFFER_SIZE (moov));
- memset (GST_BUFFER_DATA (newmoov) + GST_BUFFER_SIZE (moov), 0,
- final_length - 8);
+ gst_buffer_copy_into (newmoov, moov, 0, 0, map.size);
+ gst_buffer_map (newmoov, &map, GST_MAP_WRITE);
+ memset (map.data + length - final_length + 8, 0, final_length - 8);
gst_buffer_unref (moov);
moov = newmoov;
}
}
}
- if (length != GST_BUFFER_SIZE (moov)) {
+ if (length != map.size) {
GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
(_("This file is incomplete and cannot be played.")),
- ("We got less than expected (received %u, wanted %u, offset %"
- G_GUINT64_FORMAT ")",
- GST_BUFFER_SIZE (moov), (guint) length, cur_offset));
+ ("We got less than expected (received %" G_GSIZE_FORMAT
+ ", wanted %u, offset %" G_GUINT64_FORMAT ")", map.size,
+ (guint) length, cur_offset));
+ gst_buffer_unmap (moov, &map);
gst_buffer_unref (moov);
ret = GST_FLOW_ERROR;
goto beach;
}
qtdemux->offset += length;
- qtdemux_parse_moov (qtdemux, GST_BUFFER_DATA (moov), length);
+ qtdemux_parse_moov (qtdemux, map.data, length);
qtdemux_node_dump (qtdemux, qtdemux->moov_node);
qtdemux_parse_tree (qtdemux);
g_node_destroy (qtdemux->moov_node);
+ gst_buffer_unmap (moov, &map);
gst_buffer_unref (moov);
qtdemux->moov_node = NULL;
qtdemux->got_moov = TRUE;
if (ret != GST_FLOW_OK)
goto beach;
qtdemux->offset += length;
- qtdemux_parse_ftyp (qtdemux, GST_BUFFER_DATA (ftyp),
- GST_BUFFER_SIZE (ftyp));
+ gst_buffer_map (ftyp, &map, GST_MAP_READ);
+ qtdemux_parse_ftyp (qtdemux, map.data, map.size);
+ gst_buffer_unmap (ftyp, &map);
gst_buffer_unref (ftyp);
break;
}
if (ret != GST_FLOW_OK)
goto beach;
qtdemux->offset += length;
- qtdemux_parse_uuid (qtdemux, GST_BUFFER_DATA (uuid),
- GST_BUFFER_SIZE (uuid));
+ gst_buffer_map (uuid, &map, GST_MAP_READ);
+ qtdemux_parse_uuid (qtdemux, map.data, map.size);
+ gst_buffer_unmap (uuid, &map);
gst_buffer_unref (uuid);
break;
}
ret = gst_qtdemux_pull_atom (qtdemux, cur_offset, length, &unknown);
if (ret != GST_FLOW_OK)
goto beach;
- GST_MEMDUMP ("Unknown tag", GST_BUFFER_DATA (unknown),
- GST_BUFFER_SIZE (unknown));
+ gst_buffer_map (unknown, &map, GST_MAP_READ);
+ GST_MEMDUMP ("Unknown tag", map.data, map.size);
+ gst_buffer_unmap (unknown, &map);
gst_buffer_unref (unknown);
qtdemux->offset += length;
break;
}
beach:
- if (ret == GST_FLOW_UNEXPECTED && qtdemux->got_moov) {
+ if (ret == GST_FLOW_EOS && qtdemux->got_moov) {
/* digested all data, show what we have */
ret = qtdemux_expose_streams (qtdemux);
QtDemuxStream *str = qtdemux->streams[n];
seg_idx = gst_qtdemux_find_segment (qtdemux, str,
- qtdemux->segment.last_stop);
+ qtdemux->segment.position);
/* segment not found, continue with normal flow */
if (seg_idx == -1)
k_index, GST_TIME_ARGS (k_pos));
/* Set last_stop with the keyframe timestamp we pushed of that stream */
- gst_segment_set_last_stop (&qtdemux->segment, GST_FORMAT_TIME, last_stop);
+ qtdemux->segment.position = last_stop;
GST_DEBUG_OBJECT (qtdemux, "last_stop now is %" GST_TIME_FORMAT,
GST_TIME_ARGS (last_stop));
return GST_FLOW_OK;
eos:
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
/* activate the given segment number @seg_idx of @stream at time @offset.
/* update the segment values used for clipping */
gst_segment_init (&stream->segment, GST_FORMAT_TIME);
- gst_segment_set_newsegment (&stream->segment, FALSE, rate, GST_FORMAT_TIME,
- start, stop, time);
+ /* accumulate previous segments */
+ if (GST_CLOCK_TIME_IS_VALID (stream->segment.stop))
+ stream->segment.base += (stream->segment.stop - stream->segment.start) /
+ ABS (stream->segment.rate);
+ stream->segment.rate = rate;
+ stream->segment.start = start;
+ stream->segment.stop = stop;
+ stream->segment.time = time;
/* now prepare and send the segment */
if (stream->pad) {
- event = gst_event_new_new_segment (FALSE, rate, GST_FORMAT_TIME,
- start, stop, time);
+ event = gst_event_new_segment (&stream->segment);
gst_pad_push_event (stream->pad, event);
/* assume we can send more data now */
stream->last_ret = GST_FLOW_OK;
end_time = stream->segments[stream->n_segments - 1].stop_time;
GST_LOG_OBJECT (demux, "current position: %" GST_TIME_FORMAT
", stream end: %" GST_TIME_FORMAT,
- GST_TIME_ARGS (demux->segment.last_stop), GST_TIME_ARGS (end_time));
- if (end_time + 2 * GST_SECOND < demux->segment.last_stop) {
+ GST_TIME_ARGS (demux->segment.position), GST_TIME_ARGS (end_time));
+ if (end_time + 2 * GST_SECOND < demux->segment.position) {
GST_DEBUG_OBJECT (demux, "sending EOS for stream %s",
GST_PAD_NAME (stream->pad));
stream->sent_eos = TRUE;
}
}
-/* UNEXPECTED and NOT_LINKED need to be combined. This means that we return:
+/* EOS and NOT_LINKED need to be combined. This means that we return:
*
* GST_FLOW_NOT_LINKED: when all pads NOT_LINKED.
- * GST_FLOW_UNEXPECTED: when all pads UNEXPECTED or NOT_LINKED.
+ * GST_FLOW_EOS: when all pads EOS or NOT_LINKED.
*/
static GstFlowReturn
gst_qtdemux_combine_flows (GstQTDemux * demux, QtDemuxStream * stream,
stream->last_ret = ret;
/* any other error that is not-linked or eos can be returned right away */
- if (G_LIKELY (ret != GST_FLOW_UNEXPECTED && ret != GST_FLOW_NOT_LINKED))
+ if (G_LIKELY (ret != GST_FLOW_EOS && ret != GST_FLOW_NOT_LINKED))
goto done;
/* only return NOT_LINKED if all other pads returned NOT_LINKED */
ret = ostream->last_ret;
/* no unexpected or unlinked, return */
- if (G_LIKELY (ret != GST_FLOW_UNEXPECTED && ret != GST_FLOW_NOT_LINKED))
+ if (G_LIKELY (ret != GST_FLOW_EOS && ret != GST_FLOW_NOT_LINKED))
goto done;
/* we check to see if we have at least 1 unexpected or all unlinked */
- unexpected |= (ret == GST_FLOW_UNEXPECTED);
+ unexpected |= (ret == GST_FLOW_EOS);
not_linked &= (ret == GST_FLOW_NOT_LINKED);
}
if (not_linked)
ret = GST_FLOW_NOT_LINKED;
else if (unexpected)
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
done:
GST_LOG_OBJECT (demux, "combined flow return: %s", gst_flow_get_name (ret));
return ret;
gst_qtdemux_clip_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
GstBuffer * buf)
{
- gint64 start, stop, cstart, cstop, diff;
+ guint64 start, stop, cstart, cstop, diff;
GstClockTime timestamp = GST_CLOCK_TIME_NONE, duration = GST_CLOCK_TIME_NONE;
- guint8 *data;
guint size;
gint num_rate, denom_rate;
gint frame_size;
gboolean clip_data;
+ guint offset;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ size = gst_buffer_get_size (buf);
+ offset = 0;
/* depending on the type, setup the clip parameters */
if (stream->subtype == FOURCC_soun) {
"clipping start to %" GST_TIME_FORMAT " %"
G_GUINT64_FORMAT " bytes", GST_TIME_ARGS (cstart), diff);
- data += diff;
+ offset = diff;
size -= diff;
}
}
}
}
+ gst_buffer_resize (buf, offset, size);
GST_BUFFER_TIMESTAMP (buf) = timestamp;
GST_BUFFER_DURATION (buf) = duration;
- GST_BUFFER_SIZE (buf) = size;
- GST_BUFFER_DATA (buf) = data;
return buf;
gst_qtdemux_process_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
GstBuffer * buf)
{
- guint8 *data;
- guint size, nsize = 0;
+ GstMapInfo map;
+ guint nsize = 0;
gchar *str;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
-
/* not many cases for now */
if (G_UNLIKELY (stream->fourcc == FOURCC_mp4s)) {
/* send a one time dvd clut event */
return buf;
}
- if (G_LIKELY (size >= 2)) {
- nsize = GST_READ_UINT16_BE (data);
- nsize = MIN (nsize, size - 2);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ if (G_LIKELY (map.size >= 2)) {
+ nsize = GST_READ_UINT16_BE (map.data);
+ nsize = MIN (nsize, map.size - 2);
}
- GST_LOG_OBJECT (qtdemux, "3GPP timed text subtitle: %d/%d", nsize, size);
+ GST_LOG_OBJECT (qtdemux, "3GPP timed text subtitle: %d/%" G_GSIZE_FORMAT "",
+ nsize, map.size);
/* takes care of UTF-8 validation or UTF-16 recognition,
* no other encoding expected */
- str = gst_tag_freeform_string_to_utf8 ((gchar *) data + 2, nsize, NULL);
+ str = gst_tag_freeform_string_to_utf8 ((gchar *) map.data + 2, nsize, NULL);
+ gst_buffer_unmap (buf, &map);
if (str) {
gst_buffer_unref (buf);
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = GST_BUFFER_MALLOCDATA (buf) = (guint8 *) str;
- GST_BUFFER_SIZE (buf) = strlen (str);
+ buf = _gst_buffer_new_wrapped (str, strlen (str), g_free);
} else {
/* may be 0-size subtitle, which is also sent to keep pipeline going */
- GST_BUFFER_DATA (buf) = data + 2;
- GST_BUFFER_SIZE (buf) = nsize;
+ gst_buffer_resize (buf, 2, nsize);
}
/* FIXME ? convert optional subsequent style info to markup */
if (G_UNLIKELY (stream->fourcc == FOURCC_rtsp)) {
gchar *url;
+ GstMapInfo map;
- url = g_strndup ((gchar *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ url = g_strndup ((gchar *) map.data, map.size);
+ gst_buffer_unmap (buf, &map);
if (url != NULL && strlen (url) != 0) {
/* we have RTSP redirect now */
gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
/* position reporting */
if (qtdemux->segment.rate >= 0) {
- gst_segment_set_last_stop (&qtdemux->segment, GST_FORMAT_TIME, position);
+ qtdemux->segment.position = position;
gst_qtdemux_sync_streams (qtdemux);
}
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
stream->discont = FALSE;
}
- gst_buffer_set_caps (buffer, stream->caps);
gst_pad_push (stream->pad, buffer);
}
/* we're going to modify the metadata */
- buf = gst_buffer_make_metadata_writable (buf);
+ buf = gst_buffer_make_writable (buf);
if (G_UNLIKELY (stream->need_process))
buf = gst_qtdemux_process_buffer (qtdemux, stream, buf);
GST_BUFFER_OFFSET_END (buf) = -1;
if (G_UNLIKELY (stream->padding)) {
- GST_BUFFER_DATA (buf) += stream->padding;
- GST_BUFFER_SIZE (buf) -= stream->padding;
+ gst_buffer_resize (buf, stream->padding, -1);
}
-
+#if 0
if (G_UNLIKELY (qtdemux->element_index)) {
GstClockTime stream_time;
GST_FORMAT_BYTES, byte_position, NULL);
}
}
+#endif
if (stream->need_clip)
buf = gst_qtdemux_clip_buffer (qtdemux, stream, buf);
if (!keyframe)
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
- gst_buffer_set_caps (buf, stream->caps);
-
GST_LOG_OBJECT (qtdemux,
"Pushing buffer with time %" GST_TIME_FORMAT ", duration %"
GST_TIME_FORMAT " on pad %s", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
goto next;
/* last pushed sample was out of boundary, goto next sample */
- if (G_UNLIKELY (stream->last_ret == GST_FLOW_UNEXPECTED))
+ if (G_UNLIKELY (stream->last_ret == GST_FLOW_EOS))
goto next;
GST_LOG_OBJECT (qtdemux, "reading %d bytes @ %" G_GUINT64_FORMAT, size,
ret = gst_qtdemux_combine_flows (qtdemux, stream, ret);
/* ignore unlinked, we will not push on the pad anymore and we will EOS when
* we have no more data for the pad to push */
- if (ret == GST_FLOW_UNEXPECTED)
+ if (ret == GST_FLOW_EOS)
ret = GST_FLOW_OK;
next:
eos:
{
GST_DEBUG_OBJECT (qtdemux, "No samples left for any streams - EOS");
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto beach;
}
eos_stream:
break;
case QTDEMUX_STATE_MOVIE:
ret = gst_qtdemux_loop_state_movie (qtdemux);
- if (qtdemux->segment.rate < 0 && ret == GST_FLOW_UNEXPECTED) {
+ if (qtdemux->segment.rate < 0 && ret == GST_FLOW_EOS) {
ret = gst_qtdemux_seek_to_previous_keyframe (qtdemux);
}
break;
{
GST_ELEMENT_ERROR (qtdemux, STREAM, FAILED,
(NULL), ("streaming stopped, invalid state"));
- qtdemux->segment_running = FALSE;
gst_pad_pause_task (pad);
gst_qtdemux_push_event (qtdemux, gst_event_new_eos ());
goto done;
GST_LOG_OBJECT (qtdemux, "pausing task, reason %s", reason);
- qtdemux->segment_running = FALSE;
gst_pad_pause_task (pad);
/* fatal errors need special actions */
/* check EOS */
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
if (qtdemux->n_streams == 0) {
/* we have no streams, post an error */
gst_qtdemux_post_no_playable_stream_error (qtdemux);
if (qtdemux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gint64 stop;
- /* FIXME: I am not sure this is the right fix. If the sinks are
- * supposed to detect the segment is complete and accumulate
- * automatically, it does not seem to work here. Need more work */
- qtdemux->segment_running = TRUE;
-
if ((stop = qtdemux->segment.stop) == -1)
stop = qtdemux->segment.duration;
GST_LOG_OBJECT (qtdemux, "Sending EOS at end of segment");
gst_qtdemux_push_event (qtdemux, gst_event_new_eos ());
}
- } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
GST_ELEMENT_ERROR (qtdemux, STREAM, FAILED,
(NULL), ("streaming stopped, reason %s", reason));
gst_qtdemux_push_event (qtdemux, gst_event_new_eos ());
/* try harder to query upstream size if we didn't get it the first time */
if (seekable && stop == -1) {
- GstFormat fmt = GST_FORMAT_BYTES;
-
GST_DEBUG_OBJECT (demux, "doing duration query to fix up unset stop");
- gst_pad_query_peer_duration (demux->sinkpad, &fmt, &stop);
+ gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &stop);
}
/* if upstream doesn't know the size, it's likely that it's not seekable in
/* FIXME, unverified after edit list updates */
static GstFlowReturn
-gst_qtdemux_chain (GstPad * sinkpad, GstBuffer * inbuf)
+gst_qtdemux_chain (GstPad * sinkpad, GstObject * parent, GstBuffer * inbuf)
{
GstQTDemux *demux;
GstFlowReturn ret = GST_FLOW_OK;
- demux = GST_QTDEMUX (gst_pad_get_parent (sinkpad));
+ demux = GST_QTDEMUX (parent);
gst_adapter_push (demux->adapter, inbuf);
if (demux->neededbytes == -1)
goto eos;
- GST_DEBUG_OBJECT (demux, "pushing in inbuf %p, neededbytes:%u, available:%u",
- inbuf, demux->neededbytes, gst_adapter_available (demux->adapter));
+ GST_DEBUG_OBJECT (demux,
+ "pushing in inbuf %p, neededbytes:%u, available:%" G_GSIZE_FORMAT, inbuf,
+ demux->neededbytes, gst_adapter_available (demux->adapter));
while (((gst_adapter_available (demux->adapter)) >= demux->neededbytes) &&
(ret == GST_FLOW_OK)) {
gst_qtdemux_check_seekability (demux);
- data = gst_adapter_peek (demux->adapter, demux->neededbytes);
+ data = gst_adapter_map (demux->adapter, demux->neededbytes);
/* get fourcc/length, set neededbytes */
extract_initial_length_and_fourcc ((guint8 *) data, demux->neededbytes,
&size, &fourcc);
+ gst_adapter_unmap (demux->adapter);
+ data = NULL;
GST_DEBUG_OBJECT (demux, "Peeking found [%" GST_FOURCC_FORMAT "] "
"size: %" G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc), size);
if (size == 0) {
/* there may be multiple mdat (or alike) buffers */
/* sanity check */
if (demux->mdatbuffer)
- bs = GST_BUFFER_SIZE (demux->mdatbuffer);
+ bs = gst_buffer_get_size (demux->mdatbuffer);
else
bs = 0;
if (size + bs > 10 * (1 << 20))
GST_DEBUG_OBJECT (demux, "In header");
- data = gst_adapter_peek (demux->adapter, demux->neededbytes);
+ data = gst_adapter_map (demux->adapter, demux->neededbytes);
/* parse the header */
extract_initial_length_and_fourcc (data, demux->neededbytes, NULL,
demux->got_moov = TRUE;
/* prepare newsegment to send when streaming actually starts */
- if (!demux->pending_newsegment) {
- demux->pending_newsegment =
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
- 0, GST_CLOCK_TIME_NONE, 0);
- }
+ if (!demux->pending_newsegment)
+ demux->pending_newsegment = gst_event_new_segment (&demux->segment);
qtdemux_parse_moov (demux, data, demux->neededbytes);
qtdemux_node_dump (demux, demux->moov_node);
GST_DEBUG_OBJECT (demux, "Parsing [moof]");
if (!qtdemux_parse_moof (demux, data, demux->neededbytes,
demux->offset, NULL)) {
+ gst_adapter_unmap (demux->adapter);
ret = GST_FLOW_ERROR;
goto done;
}
GST_FOURCC_ARGS (fourcc));
/* Let's jump that one and go back to initial state */
}
+ gst_adapter_unmap (demux->adapter);
+ data = NULL;
if (demux->mdatbuffer && demux->n_streams) {
/* the mdat was before the header */
}
case QTDEMUX_STATE_BUFFER_MDAT:{
GstBuffer *buf;
+ guint8 fourcc[4];
GST_DEBUG_OBJECT (demux, "Got our buffer at offset %" G_GUINT64_FORMAT,
demux->offset);
buf = gst_adapter_take_buffer (demux->adapter, demux->neededbytes);
+ gst_buffer_extract (buf, 0, fourcc, 4);
GST_DEBUG_OBJECT (demux, "mdatbuffer starts with %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (QT_FOURCC (GST_BUFFER_DATA (buf) + 4)));
+ GST_FOURCC_ARGS (QT_FOURCC (fourcc)));
if (demux->mdatbuffer)
demux->mdatbuffer = gst_buffer_join (demux->mdatbuffer, buf);
else
demux->neededbytes);
}
done:
- gst_object_unref (demux);
return ret;
eos:
{
GST_DEBUG_OBJECT (demux, "no next entry, EOS");
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto done;
}
invalid_state:
}
static gboolean
-qtdemux_sink_activate (GstPad * sinkpad)
+qtdemux_sink_activate (GstPad * sinkpad, GstObject * parent)
{
- if (gst_pad_check_pull_range (sinkpad))
- return gst_pad_activate_pull (sinkpad, TRUE);
- else
- return gst_pad_activate_push (sinkpad, TRUE);
-}
+ GstQuery *query;
+ gboolean pull_mode;
-static gboolean
-qtdemux_sink_activate_pull (GstPad * sinkpad, gboolean active)
-{
- GstQTDemux *demux = GST_QTDEMUX (GST_PAD_PARENT (sinkpad));
+ query = gst_query_new_scheduling ();
- if (active) {
- demux->pullbased = TRUE;
- demux->segment_running = TRUE;
- return gst_pad_start_task (sinkpad, (GstTaskFunction) gst_qtdemux_loop,
- sinkpad);
- } else {
- demux->segment_running = FALSE;
- return gst_pad_stop_task (sinkpad);
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode (query, GST_PAD_MODE_PULL);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
}
}
static gboolean
-qtdemux_sink_activate_push (GstPad * sinkpad, gboolean active)
+qtdemux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
{
- GstQTDemux *demux = GST_QTDEMUX (GST_PAD_PARENT (sinkpad));
-
- demux->pullbased = FALSE;
+ gboolean res;
+ GstQTDemux *demux = GST_QTDEMUX (parent);
- return TRUE;
+ switch (mode) {
+ case GST_PAD_MODE_PUSH:
+ demux->pullbased = FALSE;
+ res = TRUE;
+ break;
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ demux->pullbased = TRUE;
+ res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_qtdemux_loop,
+ sinkpad);
+ } else {
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ return res;
}
#ifdef HAVE_ZLIB
switch (type) {
case FOURCC_tCtH:
buffer = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buffer), buf, size);
+ _gst_buffer_copy_into_mem (buffer, 0, buf, size);
stream->buffers = g_slist_append (stream->buffers, buffer);
GST_LOG_OBJECT (qtdemux, "parsing theora header");
break;
case FOURCC_tCt_:
buffer = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buffer), buf, size);
+ _gst_buffer_copy_into_mem (buffer, 0, buf, size);
stream->buffers = g_slist_append (stream->buffers, buffer);
GST_LOG_OBJECT (qtdemux, "parsing theora comment");
break;
case FOURCC_tCtC:
buffer = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buffer), buf, size);
+ _gst_buffer_copy_into_mem (buffer, 0, buf, size);
stream->buffers = g_slist_append (stream->buffers, buffer);
GST_LOG_OBJECT (qtdemux, "parsing theora codebook");
break;
{
/* consistent default for push based mode */
gst_segment_init (&stream->segment, GST_FORMAT_TIME);
- gst_segment_set_newsegment (&stream->segment, FALSE, 1.0, GST_FORMAT_TIME,
- 0, GST_CLOCK_TIME_NONE, 0);
if (stream->subtype == FOURCC_vide) {
- gchar *name = g_strdup_printf ("video_%02d", qtdemux->n_video_streams);
+ gchar *name = g_strdup_printf ("video_%u", qtdemux->n_video_streams);
stream->pad =
gst_pad_new_from_static_template (&gst_qtdemux_videosrc_template, name);
/* make sure it's not writable. We leave MALLOCDATA to NULL so that we
* don't free any of the buffer data. */
- palette = gst_buffer_new ();
- GST_BUFFER_FLAG_SET (palette, GST_BUFFER_FLAG_READONLY);
- GST_BUFFER_DATA (palette) = (guint8 *) palette_data;
- GST_BUFFER_SIZE (palette) = sizeof (guint32) * palette_count;
+ palette = _gst_buffer_new_wrapped ((gpointer) palette_data,
+ palette_count, NULL);
gst_caps_set_simple (stream->caps, "palette_data",
GST_TYPE_BUFFER, palette, NULL);
}
qtdemux->n_video_streams++;
} else if (stream->subtype == FOURCC_soun) {
- gchar *name = g_strdup_printf ("audio_%02d", qtdemux->n_audio_streams);
+ gchar *name = g_strdup_printf ("audio_%u", qtdemux->n_audio_streams);
stream->pad =
gst_pad_new_from_static_template (&gst_qtdemux_audiosrc_template, name);
g_free (name);
if (stream->caps) {
+ /* FIXME: Need to set channel-mask here and maybe reorder */
gst_caps_set_simple (stream->caps,
"rate", G_TYPE_INT, (int) stream->rate,
"channels", G_TYPE_INT, stream->n_channels, NULL);
} else if (stream->subtype == FOURCC_strm) {
GST_DEBUG_OBJECT (qtdemux, "stream type, not creating pad");
} else if (stream->subtype == FOURCC_subp || stream->subtype == FOURCC_text) {
- gchar *name = g_strdup_printf ("subtitle_%02d", qtdemux->n_sub_streams);
+ gchar *name = g_strdup_printf ("subtitle_%u", qtdemux->n_sub_streams);
stream->pad =
gst_pad_new_from_static_template (&gst_qtdemux_subsrc_template, name);
gst_pad_use_fixed_caps (stream->pad);
gst_pad_set_event_function (stream->pad, gst_qtdemux_handle_src_event);
- gst_pad_set_query_type_function (stream->pad,
- gst_qtdemux_get_src_query_types);
gst_pad_set_query_function (stream->pad, gst_qtdemux_handle_src_query);
+ gst_pad_set_active (stream->pad, TRUE);
GST_DEBUG_OBJECT (qtdemux, "setting caps %" GST_PTR_FORMAT, stream->caps);
gst_pad_set_caps (stream->pad, stream->caps);
GST_DEBUG_OBJECT (qtdemux, "adding pad %s %p to qtdemux %p",
GST_OBJECT_NAME (stream->pad), stream->pad, qtdemux);
- gst_pad_set_active (stream->pad, TRUE);
gst_element_add_pad (GST_ELEMENT_CAST (qtdemux), stream->pad);
+
if (stream->pending_tags)
gst_tag_list_free (stream->pending_tags);
stream->pending_tags = list;
- if (list) {
- /* post now, send event on pad later */
- GST_DEBUG_OBJECT (qtdemux, "Posting tags %" GST_PTR_FORMAT, list);
- gst_element_post_message (GST_ELEMENT (qtdemux),
- gst_message_new_tag_full (GST_OBJECT (qtdemux), stream->pad,
- gst_tag_list_copy (list)));
- }
/* global tags go on each pad anyway */
stream->send_global_tags = TRUE;
}
G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc), *offset);
while (TRUE) {
+ GstMapInfo map;
+
ret = gst_pad_pull_range (qtdemux->sinkpad, *offset, 16, &buf);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto locate_failed;
- if (G_LIKELY (GST_BUFFER_SIZE (buf) != 16)) {
+ if (G_UNLIKELY (gst_buffer_get_size (buf) != 16)) {
/* likely EOF */
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
gst_buffer_unref (buf);
goto locate_failed;
}
- extract_initial_length_and_fourcc (GST_BUFFER_DATA (buf), 16, length,
- &lfourcc);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ extract_initial_length_and_fourcc (map.data, 16, length, &lfourcc);
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
if (G_UNLIKELY (*length == 0)) {
GstBuffer *buf = NULL;
GstFlowReturn ret = GST_FLOW_OK;
GstFlowReturn res = GST_FLOW_OK;
+ GstMapInfo map;
offset = qtdemux->moof_offset;
GST_DEBUG_OBJECT (qtdemux, "next moof at offset %" G_GUINT64_FORMAT, offset);
if (!offset) {
GST_DEBUG_OBJECT (qtdemux, "no next moof");
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
/* best not do pull etc with lock held */
ret = gst_qtdemux_pull_atom (qtdemux, offset, length, &buf);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto flow_failed;
- if (!qtdemux_parse_moof (qtdemux, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf), offset, NULL)) {
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (!qtdemux_parse_moof (qtdemux, map.data, map.size, offset, NULL)) {
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
buf = NULL;
goto parse_failed;
}
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
buf = NULL;
flow_failed:
{
/* maybe upstream temporarily flushing */
- if (ret != GST_FLOW_WRONG_STATE) {
+ if (ret != GST_FLOW_FLUSHING) {
GST_DEBUG_OBJECT (qtdemux, "no next moof");
offset = 0;
} else {
seqh_size = QT_UINT32 (data + 4);
if (seqh_size > 0) {
_seqh = gst_buffer_new_and_alloc (seqh_size);
- memcpy (GST_BUFFER_DATA (_seqh), data + 8, seqh_size);
+ _gst_buffer_copy_into_mem (_seqh, 0, data + 8, seqh_size);
}
}
}
static const guint wb_bitrates[] = {
6600, 8850, 12650, 14250, 15850, 18250, 19850, 23050, 23850
};
- const guint8 *data = GST_BUFFER_DATA (buf);
- guint size = QT_UINT32 (data), max_mode;
+ GstMapInfo map;
+ gsize max_mode;
guint16 mode_set;
- if (GST_BUFFER_SIZE (buf) != 0x11) {
- GST_DEBUG ("Atom should have size 0x11, not %u", size);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ if (map.size != 0x11) {
+ GST_DEBUG ("Atom should have size 0x11, not %" G_GSIZE_FORMAT, map.size);
goto bad_data;
}
- if (QT_FOURCC (data + 4) != GST_MAKE_FOURCC ('d', 'a', 'm', 'r')) {
+ if (QT_FOURCC (map.data + 4) != GST_MAKE_FOURCC ('d', 'a', 'm', 'r')) {
GST_DEBUG ("Unknown atom in %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (QT_UINT32 (data + 4)));
+ GST_FOURCC_ARGS (QT_UINT32 (map.data + 4)));
goto bad_data;
}
- mode_set = QT_UINT16 (data + 13);
+ mode_set = QT_UINT16 (map.data + 13);
if (mode_set == (wb ? AMR_WB_ALL_MODES : AMR_NB_ALL_MODES))
max_mode = 7 + (wb ? 1 : 0);
goto bad_data;
}
+ gst_buffer_unmap (buf, &map);
return wb ? wb_bitrates[max_mode] : nb_bitrates[max_mode];
bad_data:
+ gst_buffer_unmap (buf, &map);
return 0;
}
stream->caps =
qtdemux_video_caps (qtdemux, stream, fourcc, stsd_data, &codec);
if (codec) {
- list = gst_tag_list_new ();
+ list = gst_tag_list_new_empty ();
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_VIDEO_CODEC, codec, NULL);
g_free (codec);
avc_data + 8 + 1, size - 1);
buf = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buf), avc_data + 0x8, size);
+ _gst_buffer_copy_into_mem (buf, 0, avc_data + 0x8, size);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
}
if (!list)
- list = gst_tag_list_new ();
+ list = gst_tag_list_new_empty ();
if (max_bitrate > 0 && max_bitrate < G_MAXUINT32) {
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
if (len > 0x8) {
len -= 0x8;
buf = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buf), data + 8, len);
+ _gst_buffer_copy_into_mem (buf, 0, data + 8, len);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
/* see annex I of the jpeg2000 spec */
GNode *jp2h, *ihdr, *colr, *mjp2, *field, *prefix, *cmap, *cdef;
const guint8 *data;
- guint32 fourcc = 0;
+ const gchar *colorspace = NULL;
gint ncomp = 0;
guint32 ncomp_map = 0;
gint32 *comp_map = NULL;
if (QT_UINT8 ((guint8 *) colr->data + 8) == 1) {
switch (QT_UINT32 ((guint8 *) colr->data + 11)) {
case 16:
- fourcc = GST_MAKE_FOURCC ('s', 'R', 'G', 'B');
+ colorspace = "sRGB";
break;
case 17:
- fourcc = GST_MAKE_FOURCC ('G', 'R', 'A', 'Y');
+ colorspace = "GRAY";
break;
case 18:
- fourcc = GST_MAKE_FOURCC ('s', 'Y', 'U', 'V');
+ colorspace = "sYUV";
break;
default:
+ colorspace = NULL;
break;
}
}
- if (!fourcc)
+ if (!colorspace)
/* colr is required, and only values 16, 17, and 18 are specified,
- so error if we have no fourcc */
+ so error if we have no colorspace */
break;
/* extract component mapping */
gst_caps_set_simple (stream->caps,
"num-components", G_TYPE_INT, ncomp, NULL);
gst_caps_set_simple (stream->caps,
- "fourcc", GST_TYPE_FOURCC, fourcc, NULL);
+ "colorspace", G_TYPE_STRING, colorspace, NULL);
if (comp_map) {
GValue arr = { 0, };
if (len > 0x8) {
len -= 0x8;
buf = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buf), data + 8, len);
+ _gst_buffer_copy_into_mem (buf, 0, data + 8, len);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
GST_DEBUG_OBJECT (qtdemux, "found codec_data in stsd");
buf = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buf), stsd_data, len);
+ _gst_buffer_copy_into_mem (buf, 0, stsd_data, len);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
case FOURCC_ovc1:
{
GNode *ovc1;
- gchar *ovc1_data;
+ guint8 *ovc1_data;
guint ovc1_len;
GstBuffer *buf;
break;
}
buf = gst_buffer_new_and_alloc (ovc1_len - 198);
- memcpy (GST_BUFFER_DATA (buf), ovc1_data + 198, ovc1_len - 198);
+ _gst_buffer_copy_into_mem (buf, 0, ovc1_data + 198, ovc1_len - 198);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
}
if (enda) {
gst_caps_set_simple (stream->caps,
- "endianness", G_TYPE_INT, G_LITTLE_ENDIAN, NULL);
+ "format", G_TYPE_STRING, "S24_3LE", NULL);
}
break;
}
case FOURCC_owma:
{
GNode *owma;
- const gchar *owma_data, *codec_name = NULL;
+ const guint8 *owma_data;
+ const gchar *codec_name = NULL;
guint owma_len;
GstBuffer *buf;
gint version = 1;
}
wfex = (WAVEFORMATEX *) (owma_data + 36);
buf = gst_buffer_new_and_alloc (owma_len - 54);
- memcpy (GST_BUFFER_DATA (buf), owma_data + 54, owma_len - 54);
+ _gst_buffer_copy_into_mem (buf, 0, owma_data + 54, owma_len - 54);
if (wfex->wFormatTag == 0x0161) {
codec_name = "Windows Media Audio";
version = 2;
GstStructure *s;
gint bitrate = 0;
- list = gst_tag_list_new ();
+ list = gst_tag_list_new_empty ();
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_AUDIO_CODEC, codec, NULL);
g_free (codec);
waveheader += 8;
headerlen -= 8;
- headerbuf = gst_buffer_new ();
- GST_BUFFER_DATA (headerbuf) = (guint8 *) waveheader;
- GST_BUFFER_SIZE (headerbuf) = headerlen;
+ headerbuf = gst_buffer_new_and_alloc (headerlen);
+ _gst_buffer_copy_into_mem (headerbuf, 0, waveheader, headerlen);
if (gst_riff_parse_strf_auds (GST_ELEMENT_CAST (qtdemux),
headerbuf, &header, &extra)) {
gst_caps_unref (stream->caps);
+ /* FIXME: Need to do something with the channel reorder map */
stream->caps = gst_riff_create_audio_caps (header->format, NULL,
- header, extra, NULL, NULL);
+ header, extra, NULL, NULL, NULL);
if (extra)
gst_buffer_unref (extra);
if (len > 0x4C) {
GstBuffer *buf = gst_buffer_new_and_alloc (len - 0x4C);
- memcpy (GST_BUFFER_DATA (buf), stsd_data + 0x4C, len - 0x4C);
+ _gst_buffer_copy_into_mem (buf, 0, stsd_data + 0x4C, len - 0x4C);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
/* codec-data contains alac atom size and prefix,
* ffmpeg likes it that way, not quite gst-ish though ...*/
buf = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buf), alac->data, len);
+ _gst_buffer_copy_into_mem (buf, 0, alac->data, len);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
GstBuffer *buf = gst_buffer_new_and_alloc (len - 0x34);
guint bitrate;
- memcpy (GST_BUFFER_DATA (buf), stsd_data + 0x34, len - 0x34);
+ _gst_buffer_copy_into_mem (buf, 0, stsd_data + 0x34, len - 0x34);
/* If we have enough data, let's try to get the 'damr' atom. See
* the 3GPP container spec (26.244) for more details. */
if ((len - 0x34) > 8 &&
(bitrate = qtdemux_parse_amr_bitrate (buf, amrwb))) {
if (!list)
- list = gst_tag_list_new ();
+ list = gst_tag_list_new_empty ();
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_MAXIMUM_BITRATE, bitrate, NULL);
}
stream->caps =
qtdemux_sub_caps (qtdemux, stream, fourcc, stsd_data, &codec);
if (codec) {
- list = gst_tag_list_new ();
+ list = gst_tag_list_new_empty ();
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_SUBTITLE_CODEC, codec, NULL);
g_free (codec);
const gchar *lang_code;
if (!list)
- list = gst_tag_list_new ();
+ list = gst_tag_list_new_empty ();
/* convert ISO 639-2 code to ISO 639-1 */
lang_code = gst_tag_get_language_code (stream->lang_id);
static void
gst_qtdemux_guess_bitrate (GstQTDemux * qtdemux)
{
- GstFormat format = GST_FORMAT_BYTES;
QtDemuxStream *stream = NULL;
gint64 size, duration, sys_bitrate, sum_bitrate = 0;
gint i;
GST_DEBUG_OBJECT (qtdemux, "Looking for streams with unknown bitrate");
- if (!gst_pad_query_peer_duration (qtdemux->sinkpad, &format, &size) ||
- format != GST_FORMAT_BYTES) {
+ if (!gst_pad_peer_query_duration (qtdemux->sinkpad, GST_FORMAT_BYTES, &size)) {
GST_DEBUG_OBJECT (qtdemux,
"Size in bytes of the stream not known - bailing");
return;
", Stream bitrate = %u", sys_bitrate, bitrate);
if (!stream->pending_tags)
- stream->pending_tags = gst_tag_list_new ();
+ stream->pending_tags = gst_tag_list_new_empty ();
gst_tag_list_add (stream->pending_tags, GST_TAG_MERGE_REPLACE,
GST_TAG_BITRATE, bitrate, NULL);
return ((qtdemux->major_brand & GST_MAKE_FOURCC (255, 255, 0, 0)) ==
GST_MAKE_FOURCC ('3', 'g', 0, 0));
} else if (qtdemux->comp_brands != NULL) {
- guint8 *data = GST_BUFFER_DATA (qtdemux->comp_brands);
- guint size = GST_BUFFER_SIZE (qtdemux->comp_brands);
+ GstMapInfo map;
+ guint8 *data;
+ gsize size;
gboolean res = FALSE;
+ gst_buffer_map (qtdemux->comp_brands, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
while (size >= 4) {
res = res || ((QT_FOURCC (data) & GST_MAKE_FOURCC (255, 255, 0, 0)) ==
GST_MAKE_FOURCC ('3', 'g', 0, 0));
data += 4;
size -= 4;
}
+ gst_buffer_unmap (qtdemux->comp_brands, &map);
return res;
} else {
return FALSE;
GNode *data;
int len;
int type;
- GstBuffer *buf;
+ GstSample *sample;
data = qtdemux_tree_get_child_by_type (node, FOURCC_data);
if (data) {
type = QT_UINT32 ((guint8 *) data->data + 8);
GST_DEBUG_OBJECT (qtdemux, "have covr tag, type=%d,len=%d", type, len);
if ((type == 0x0000000d || type == 0x0000000e) && len > 16) {
- if ((buf = gst_tag_image_data_to_image_buffer ((guint8 *) data->data + 16,
+ if ((sample =
+ gst_tag_image_data_to_image_sample ((guint8 *) data->data + 16,
len - 16, GST_TAG_IMAGE_TYPE_NONE))) {
GST_DEBUG_OBJECT (qtdemux, "adding tag size %d", len - 16);
gst_tag_list_add (qtdemux->tag_list, GST_TAG_MERGE_REPLACE,
- tag1, buf, NULL);
- gst_buffer_unref (buf);
+ tag1, sample, NULL);
+ gst_sample_unref (sample);
}
}
}
if (len < 12 + 2)
return;
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = data + 14;
- GST_BUFFER_SIZE (buf) = len - 14;
+ buf = gst_buffer_new_allocate (NULL, len - 14, 0);
+ gst_buffer_fill (buf, 0, data + 14, len - 14);
taglist = gst_tag_list_from_id3v2_tag (buf);
if (taglist) {
data = node->data;
len = QT_UINT32 (data);
buf = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buf), data, len);
+ _gst_buffer_copy_into_mem (buf, 0, data, len);
/* heuristic to determine style of tag */
if (QT_FOURCC (data + 4) == FOURCC_____ ||
GST_DEBUG_OBJECT (demux, "media type %s", media_type);
caps = gst_caps_new_simple (media_type, "style", G_TYPE_STRING, style, NULL);
- gst_buffer_set_caps (buf, caps);
+ // TODO conver to metadata or ???
+// gst_buffer_set_caps (buf, caps);
gst_caps_unref (caps);
g_free (media_type);
GST_DEBUG_OBJECT (demux, "adding private tag; size %d, caps %" GST_PTR_FORMAT,
- GST_BUFFER_SIZE (buf), caps);
+ len, caps);
gst_tag_list_add (demux->tag_list, GST_TAG_MERGE_APPEND,
GST_QT_DEMUX_PRIVATE_TAG, buf, NULL);
GST_DEBUG_OBJECT (qtdemux, "new tag list");
if (!qtdemux->tag_list)
- qtdemux->tag_list = gst_tag_list_new ();
+ qtdemux->tag_list = gst_tag_list_new_empty ();
i = 0;
while (i < G_N_ELEMENTS (add_funcs)) {
GstBuffer *buf;
GstTagList *taglist;
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = ((guint8 *) xmp_->data) + 8;
- GST_BUFFER_SIZE (buf) = QT_UINT32 ((guint8 *) xmp_->data) - 8;
-
+ buf = _gst_buffer_new_wrapped (((guint8 *) xmp_->data) + 8,
+ QT_UINT32 ((guint8 *) xmp_->data) - 8, NULL);
taglist = gst_tag_list_from_xmp_buffer (buf);
gst_buffer_unref (buf);
const gchar *fmt;
if (tags == NULL)
- tags = gst_tag_list_new ();
+ tags = gst_tag_list_new_empty ();
if (qtdemux->major_brand == FOURCC_mjp2)
fmt = "Motion JPEG 2000";
}
if (datetime) {
if (!qtdemux->tag_list)
- qtdemux->tag_list = gst_tag_list_new ();
+ qtdemux->tag_list = gst_tag_list_new_empty ();
/* Use KEEP as explicit tags should have a higher priority than mvhd tag */
gst_tag_list_add (qtdemux->tag_list, GST_TAG_MERGE_KEEP, GST_TAG_DATE_TIME,
/* set duration in the segment info */
gst_qtdemux_get_duration (qtdemux, &duration);
if (duration) {
- gst_segment_set_duration (&qtdemux->segment, GST_FORMAT_TIME, duration);
+ qtdemux->segment.duration = duration;
/* also do not exceed duration; stop is set that way post seek anyway,
* and segment activation falls back to duration,
* whereas loop only checks stop, so let's align this here as well */
"systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
break;
case 0x6C: /* MJPEG */
- caps = gst_caps_new_simple ("image/jpeg", NULL);
+ caps = gst_caps_new_empty_simple ("image/jpeg");
codec_name = "Motion-JPEG";
break;
case 0x6D: /* PNG */
- caps = gst_caps_new_simple ("image/png", NULL);
+ caps = gst_caps_new_empty_simple ("image/png");
codec_name = "PNG still images";
break;
case 0x6E: /* JPEG2000 */
break;
case 0xA4: /* Dirac */
codec_name = "Dirac";
- caps = gst_caps_new_simple ("video/x-dirac", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-dirac");
break;
case 0xA5: /* AC3 */
codec_name = "AC-3 audio";
case 0xE1: /* QCELP */
/* QCELP, the codec_data is a riff tag (little endian) with
* more info (http://ftp.3gpp2.org/TSGC/Working/2003/2003-05-SanDiego/TSG-C-2003-05-San%20Diego/WG1/SWG12/C12-20030512-006%20=%20C12-20030217-015_Draft_Baseline%20Text%20of%20FFMS_R2.doc). */
- caps = gst_caps_new_simple ("audio/qcelp", NULL);
+ caps = gst_caps_new_empty_simple ("audio/qcelp");
codec_name = "QCELP";
break;
default:
GstBuffer *buffer;
buffer = gst_buffer_new_and_alloc (data_len);
- memcpy (GST_BUFFER_DATA (buffer), data_ptr, data_len);
+ _gst_buffer_copy_into_mem (buffer, 0, data_ptr, data_len);
GST_DEBUG_OBJECT (qtdemux, "setting codec_data from esds");
GST_MEMDUMP_OBJECT (qtdemux, "codec_data from esds", data_ptr, data_len);
switch (fourcc) {
case GST_MAKE_FOURCC ('p', 'n', 'g', ' '):
_codec ("PNG still images");
- caps = gst_caps_new_simple ("image/png", NULL);
+ caps = gst_caps_new_empty_simple ("image/png");
break;
case GST_MAKE_FOURCC ('j', 'p', 'e', 'g'):
_codec ("JPEG still images");
- caps = gst_caps_new_simple ("image/jpeg", NULL);
+ caps = gst_caps_new_empty_simple ("image/jpeg");
break;
case GST_MAKE_FOURCC ('m', 'j', 'p', 'a'):
case GST_MAKE_FOURCC ('A', 'V', 'D', 'J'):
case GST_MAKE_FOURCC ('M', 'J', 'P', 'G'):
case GST_MAKE_FOURCC ('d', 'm', 'b', '1'):
_codec ("Motion-JPEG");
- caps = gst_caps_new_simple ("image/jpeg", NULL);
+ caps = gst_caps_new_empty_simple ("image/jpeg");
break;
case GST_MAKE_FOURCC ('m', 'j', 'p', 'b'):
_codec ("Motion-JPEG format B");
- caps = gst_caps_new_simple ("video/x-mjpeg-b", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-mjpeg-b");
break;
case GST_MAKE_FOURCC ('m', 'j', 'p', '2'):
_codec ("JPEG-2000");
_codec ("Raw RGB video");
bps = QT_UINT16 (stsd_data + 98);
/* set common stuff */
- caps = gst_caps_new_simple ("video/x-raw-rgb",
- "endianness", G_TYPE_INT, G_BYTE_ORDER, "depth", G_TYPE_INT, bps,
- NULL);
+ caps = gst_caps_new_empty_simple ("video/x-raw");
switch (bps) {
case 15:
- gst_caps_set_simple (caps,
- "bpp", G_TYPE_INT, 16,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "red_mask", G_TYPE_INT, 0x7c00,
- "green_mask", G_TYPE_INT, 0x03e0,
- "blue_mask", G_TYPE_INT, 0x001f, NULL);
+ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "RGB15", NULL);
break;
case 16:
- gst_caps_set_simple (caps,
- "bpp", G_TYPE_INT, 16,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "red_mask", G_TYPE_INT, 0xf800,
- "green_mask", G_TYPE_INT, 0x07e0,
- "blue_mask", G_TYPE_INT, 0x001f, NULL);
+ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "RGB16", NULL);
break;
case 24:
- gst_caps_set_simple (caps,
- "bpp", G_TYPE_INT, 24,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "red_mask", G_TYPE_INT, 0xff0000,
- "green_mask", G_TYPE_INT, 0x00ff00,
- "blue_mask", G_TYPE_INT, 0x0000ff, NULL);
+ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "RGB", NULL);
break;
case 32:
- gst_caps_set_simple (caps,
- "bpp", G_TYPE_INT, 32,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "alpha_mask", G_TYPE_INT, 0xff000000,
- "red_mask", G_TYPE_INT, 0x00ff0000,
- "green_mask", G_TYPE_INT, 0x0000ff00,
- "blue_mask", G_TYPE_INT, 0x000000ff, NULL);
+ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "ARGB", NULL);
break;
default:
/* unknown */
}
case GST_MAKE_FOURCC ('y', 'v', '1', '2'):
_codec ("Raw planar YUV 4:2:0");
- caps = gst_caps_new_simple ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
- NULL);
+ caps = gst_caps_new_simple ("video/x-raw",
+ "format", G_TYPE_STRING, "I420", NULL);
break;
case GST_MAKE_FOURCC ('y', 'u', 'v', '2'):
case GST_MAKE_FOURCC ('Y', 'u', 'v', '2'):
_codec ("Raw packed YUV 4:2:2");
- caps = gst_caps_new_simple ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'),
- NULL);
+ caps = gst_caps_new_simple ("video/x-raw",
+ "format", G_TYPE_STRING, "YUY2", NULL);
break;
case GST_MAKE_FOURCC ('2', 'v', 'u', 'y'):
case GST_MAKE_FOURCC ('2', 'V', 'u', 'y'):
_codec ("Raw packed YUV 4:2:2");
- caps = gst_caps_new_simple ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'),
- NULL);
+ caps = gst_caps_new_simple ("video/x-raw",
+ "format", G_TYPE_STRING, "UYVY", NULL);
break;
case GST_MAKE_FOURCC ('v', '2', '1', '0'):
_codec ("Raw packed YUV 10-bit 4:2:2");
- caps = gst_caps_new_simple ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('v', '2', '1', '0'),
- NULL);
+ caps = gst_caps_new_simple ("video/x-raw",
+ "format", G_TYPE_STRING, "v210", NULL);
break;
case GST_MAKE_FOURCC ('r', '2', '1', '0'):
_codec ("Raw packed RGB 10-bit 4:4:4");
- caps = gst_caps_new_simple ("video/x-raw-rgb",
- "endianness", G_TYPE_INT, G_BIG_ENDIAN, "depth", G_TYPE_INT, 30,
- "bpp", G_TYPE_INT, 32,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "red_mask", G_TYPE_INT, 0x3ff00000,
- "green_mask", G_TYPE_INT, 0x000ffc00,
- "blue_mask", G_TYPE_INT, 0x000003ff, NULL);
+ caps = gst_caps_new_simple ("video/x-raw",
+ "format", G_TYPE_STRING, "r210", NULL);
break;
case GST_MAKE_FOURCC ('m', 'p', 'e', 'g'):
case GST_MAKE_FOURCC ('m', 'p', 'g', '1'):
break;
case GST_MAKE_FOURCC ('g', 'i', 'f', ' '):
_codec ("GIF still images");
- caps = gst_caps_new_simple ("image/gif", NULL);
+ caps = gst_caps_new_empty_simple ("image/gif");
break;
case GST_MAKE_FOURCC ('h', '2', '6', '3'):
case GST_MAKE_FOURCC ('H', '2', '6', '3'):
case GST_MAKE_FOURCC ('U', '2', '6', '3'):
_codec ("H.263");
/* ffmpeg uses the height/width props, don't know why */
- caps = gst_caps_new_simple ("video/x-h263", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-h263");
break;
case GST_MAKE_FOURCC ('m', 'p', '4', 'v'):
case GST_MAKE_FOURCC ('M', 'P', '4', 'V'):
case GST_MAKE_FOURCC ('3', 'I', 'V', '1'):
case GST_MAKE_FOURCC ('3', 'I', 'V', '2'):
_codec ("3ivX video");
- caps = gst_caps_new_simple ("video/x-3ivx", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-3ivx");
break;
case GST_MAKE_FOURCC ('D', 'I', 'V', '3'):
_codec ("DivX 3");
case GST_MAKE_FOURCC ('X', 'V', 'I', 'D'):
case GST_MAKE_FOURCC ('x', 'v', 'i', 'd'):
_codec ("XVID MPEG-4");
- caps = gst_caps_new_simple ("video/x-xvid", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-xvid");
break;
case GST_MAKE_FOURCC ('F', 'M', 'P', '4'):
case GST_MAKE_FOURCC ('c', 'v', 'i', 'd'):
_codec ("Cinepak");
- caps = gst_caps_new_simple ("video/x-cinepak", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-cinepak");
break;
case GST_MAKE_FOURCC ('q', 'd', 'r', 'w'):
_codec ("Apple QuickDraw");
- caps = gst_caps_new_simple ("video/x-qdrw", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-qdrw");
break;
case GST_MAKE_FOURCC ('r', 'p', 'z', 'a'):
_codec ("Apple video");
- caps = gst_caps_new_simple ("video/x-apple-video", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-apple-video");
break;
case GST_MAKE_FOURCC ('a', 'v', 'c', '1'):
_codec ("H.264 / AVC");
break;
case GST_MAKE_FOURCC ('s', 'm', 'c', ' '):
_codec ("Apple Graphics (SMC)");
- caps = gst_caps_new_simple ("video/x-smc", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-smc");
break;
case GST_MAKE_FOURCC ('V', 'P', '3', '1'):
_codec ("VP3");
- caps = gst_caps_new_simple ("video/x-vp3", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-vp3");
break;
case GST_MAKE_FOURCC ('X', 'i', 'T', 'h'):
_codec ("Theora");
- caps = gst_caps_new_simple ("video/x-theora", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-theora");
/* theora uses one byte of padding in the data stream because it does not
* allow 0 sized packets while theora does */
stream->padding = 1;
break;
case GST_MAKE_FOURCC ('d', 'r', 'a', 'c'):
_codec ("Dirac");
- caps = gst_caps_new_simple ("video/x-dirac", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-dirac");
break;
case GST_MAKE_FOURCC ('t', 'i', 'f', 'f'):
_codec ("TIFF still images");
- caps = gst_caps_new_simple ("image/tiff", NULL);
+ caps = gst_caps_new_empty_simple ("image/tiff");
break;
case GST_MAKE_FOURCC ('i', 'c', 'o', 'd'):
_codec ("Apple Intermediate Codec");
case FOURCC_ovc1:
_codec ("VC-1");
caps = gst_caps_new_simple ("video/x-wmv",
- "wmvversion", G_TYPE_INT, 3,
- "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('W', 'V', 'C', '1'),
- NULL);
+ "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
break;
case GST_MAKE_FOURCC ('k', 'p', 'c', 'd'):
default:
s = g_strdup_printf ("video/x-gst-fourcc-%" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (fourcc));
- caps = gst_caps_new_simple (s, NULL);
+ caps = gst_caps_new_empty_simple (s);
break;
}
}
/* enable clipping for raw video streams */
s = gst_caps_get_structure (caps, 0);
name = gst_structure_get_name (s);
- if (g_str_has_prefix (name, "video/x-raw-")) {
+ if (g_str_has_prefix (name, "video/x-raw")) {
stream->need_clip = TRUE;
}
return caps;
case GST_MAKE_FOURCC ('N', 'O', 'N', 'E'):
case GST_MAKE_FOURCC ('r', 'a', 'w', ' '):
_codec ("Raw 8-bit PCM audio");
- caps = gst_caps_new_simple ("audio/x-raw-int", "width", G_TYPE_INT, 8,
- "depth", G_TYPE_INT, 8, "signed", G_TYPE_BOOLEAN, FALSE, NULL);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "U8", NULL);
break;
case GST_MAKE_FOURCC ('t', 'w', 'o', 's'):
endian = G_BIG_ENDIAN;
{
gchar *str;
gint depth;
+ GstAudioFormat format;
if (!endian)
endian = G_LITTLE_ENDIAN;
depth = stream->bytes_per_packet * 8;
+ format = gst_audio_format_build_integer (TRUE, endian, depth, depth);
+
str = g_strdup_printf ("Raw %d-bit PCM audio", depth);
_codec (str);
g_free (str);
- caps = gst_caps_new_simple ("audio/x-raw-int",
- "width", G_TYPE_INT, depth, "depth", G_TYPE_INT, depth,
- "endianness", G_TYPE_INT, endian,
- "signed", G_TYPE_BOOLEAN, TRUE, NULL);
+
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format), NULL);
break;
}
case GST_MAKE_FOURCC ('f', 'l', '6', '4'):
_codec ("Raw 64-bit floating-point audio");
- caps = gst_caps_new_simple ("audio/x-raw-float", "width", G_TYPE_INT, 64,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN, NULL);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "F64BE", NULL);
break;
case GST_MAKE_FOURCC ('f', 'l', '3', '2'):
_codec ("Raw 32-bit floating-point audio");
- caps = gst_caps_new_simple ("audio/x-raw-float", "width", G_TYPE_INT, 32,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN, NULL);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "F32BE", NULL);
break;
case FOURCC_in24:
_codec ("Raw 24-bit PCM audio");
/* we assume BIG ENDIAN, an enda box will tell us to change this to little
* endian later */
- caps = gst_caps_new_simple ("audio/x-raw-int", "width", G_TYPE_INT, 24,
- "depth", G_TYPE_INT, 24,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "signed", G_TYPE_BOOLEAN, TRUE, NULL);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "S24BE", NULL);
break;
case GST_MAKE_FOURCC ('i', 'n', '3', '2'):
_codec ("Raw 32-bit PCM audio");
- caps = gst_caps_new_simple ("audio/x-raw-int", "width", G_TYPE_INT, 32,
- "depth", G_TYPE_INT, 32,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "signed", G_TYPE_BOOLEAN, TRUE, NULL);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "S32BE", NULL);
break;
case GST_MAKE_FOURCC ('u', 'l', 'a', 'w'):
_codec ("Mu-law audio");
- caps = gst_caps_new_simple ("audio/x-mulaw", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-mulaw");
break;
case GST_MAKE_FOURCC ('a', 'l', 'a', 'w'):
_codec ("A-law audio");
- caps = gst_caps_new_simple ("audio/x-alaw", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-alaw");
break;
case 0x0200736d:
case 0x6d730002:
break;
case GST_MAKE_FOURCC ('O', 'g', 'g', 'V'):
/* ogg/vorbis */
- caps = gst_caps_new_simple ("application/ogg", NULL);
+ caps = gst_caps_new_empty_simple ("application/ogg");
break;
case GST_MAKE_FOURCC ('d', 'v', 'c', 'a'):
_codec ("DV audio");
- caps = gst_caps_new_simple ("audio/x-dv", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-dv");
break;
case GST_MAKE_FOURCC ('m', 'p', '4', 'a'):
_codec ("MPEG-4 AAC audio");
break;
case GST_MAKE_FOURCC ('Q', 'D', 'M', 'C'):
_codec ("QDesign Music");
- caps = gst_caps_new_simple ("audio/x-qdm", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-qdm");
break;
case GST_MAKE_FOURCC ('Q', 'D', 'M', '2'):
_codec ("QDesign Music v.2");
"bitrate", G_TYPE_INT, QT_UINT32 (data + 40),
"blocksize", G_TYPE_INT, QT_UINT32 (data + 44), NULL);
} else {
- caps = gst_caps_new_simple ("audio/x-qdm2", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-qdm2");
}
break;
case GST_MAKE_FOURCC ('a', 'g', 's', 'm'):
_codec ("GSM audio");
- caps = gst_caps_new_simple ("audio/x-gsm", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-gsm");
break;
case GST_MAKE_FOURCC ('s', 'a', 'm', 'r'):
_codec ("AMR audio");
- caps = gst_caps_new_simple ("audio/AMR", NULL);
+ caps = gst_caps_new_empty_simple ("audio/AMR");
break;
case GST_MAKE_FOURCC ('s', 'a', 'w', 'b'):
_codec ("AMR-WB audio");
- caps = gst_caps_new_simple ("audio/AMR-WB", NULL);
+ caps = gst_caps_new_empty_simple ("audio/AMR-WB");
break;
case GST_MAKE_FOURCC ('i', 'm', 'a', '4'):
_codec ("Quicktime IMA ADPCM");
break;
case GST_MAKE_FOURCC ('a', 'l', 'a', 'c'):
_codec ("Apple lossless audio");
- caps = gst_caps_new_simple ("audio/x-alac", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-alac");
break;
case GST_MAKE_FOURCC ('Q', 'c', 'l', 'p'):
_codec ("QualComm PureVoice");
break;
case FOURCC_owma:
_codec ("WMA");
- caps = gst_caps_new_simple ("audio/x-wma", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-wma");
break;
case GST_MAKE_FOURCC ('q', 't', 'v', 'r'):
/* ? */
s = g_strdup_printf ("audio/x-gst-fourcc-%" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (fourcc));
- caps = gst_caps_new_simple (s, NULL);
+ caps = gst_caps_new_empty_simple (s);
break;
}
}
+ if (caps) {
+ GstCaps *templ_caps =
+ gst_static_pad_template_get_caps (&gst_qtdemux_audiosrc_template);
+ GstCaps *intersection = gst_caps_intersect (caps, templ_caps);
+ gst_caps_unref (caps);
+ gst_caps_unref (templ_caps);
+ caps = intersection;
+ }
+
/* enable clipping for raw audio streams */
s = gst_caps_get_structure (caps, 0);
name = gst_structure_get_name (s);
- if (g_str_has_prefix (name, "audio/x-raw-")) {
+ if (g_str_has_prefix (name, "audio/x-raw")) {
stream->need_clip = TRUE;
}
return caps;
switch (fourcc) {
case GST_MAKE_FOURCC ('m', 'p', '4', 's'):
_codec ("DVD subtitle");
- caps = gst_caps_new_simple ("video/x-dvd-subpicture", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-dvd-subpicture");
break;
case GST_MAKE_FOURCC ('t', 'e', 'x', 't'):
_codec ("Quicktime timed text");
case GST_MAKE_FOURCC ('t', 'x', '3', 'g'):
_codec ("3GPP timed text");
text:
- caps = gst_caps_new_simple ("text/plain", NULL);
+ caps = gst_caps_new_empty_simple ("text/plain");
/* actual text piece needs to be extracted */
stream->need_process = TRUE;
break;
s = g_strdup_printf ("text/x-gst-fourcc-%" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (fourcc));
- caps = gst_caps_new_simple (s, NULL);
+ caps = gst_caps_new_empty_simple (s);
break;
}
}
/* configured playback region */
GstSegment segment;
- gboolean segment_running;
GstEvent *pending_newsegment;
+#if 0
/* gst index support */
GstIndex *element_index;
gint index_id;
+#endif
gint64 requested_seek_time;
guint64 seek_offset;
gboolean
qtdemux_node_dump (GstQTDemux * qtdemux, GNode * node)
{
- if (__gst_debug_min < GST_LEVEL_LOG)
+ if (_gst_debug_min < GST_LEVEL_LOG)
return TRUE;
g_node_traverse (node, G_PRE_ORDER, G_TRAVERSE_ALL, -1,
plugin_LTLIBRARIES = libgstalaw.la libgstmulaw.la
libgstalaw_la_SOURCES = alaw-encode.c alaw-decode.c alaw.c
-libgstalaw_la_CFLAGS = $(GST_CFLAGS)
-libgstalaw_la_LIBADD = $(GST_LIBS)
+libgstalaw_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
+libgstalaw_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) \
+ $(GST_BASE_LIBS) $(GST_LIBS)
libgstalaw_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstalaw_la_LIBTOOLFLAGS = --tag=disable-static
libgstmulaw_la_SOURCES = mulaw-encode.c mulaw-conversion.c mulaw-decode.c mulaw.c
-libgstmulaw_la_CFLAGS = $(GST_CFLAGS)
-libgstmulaw_la_LIBADD = $(GST_LIBS)
+libgstmulaw_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
+libgstmulaw_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) \
+ $(GST_BASE_LIBS) $(GST_LIBS)
libgstmulaw_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstmulaw_la_LIBTOOLFLAGS = --tag=disable-static
#include "config.h"
#endif
+#include <gst/audio/audio.h>
+
#include "alaw-decode.h"
extern GstStaticPadTemplate alaw_dec_src_factory;
static GstStateChangeReturn
gst_alaw_dec_change_state (GstElement * element, GstStateChange transition);
-static GstFlowReturn gst_alaw_dec_chain (GstPad * pad, GstBuffer * buffer);
-GST_BOILERPLATE (GstALawDec, gst_alaw_dec, GstElement, GST_TYPE_ELEMENT);
+static gboolean gst_alaw_dec_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_alaw_dec_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+
+#define gst_alaw_dec_parent_class parent_class
+G_DEFINE_TYPE (GstALawDec, gst_alaw_dec, GST_TYPE_ELEMENT);
/* some day we might have defines in gstconfig.h that tell us about the
* desired cpu/memory/binary size trade-offs */
#endif /* GST_ALAW_DEC_USE_TABLE */
static gboolean
-gst_alaw_dec_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_alaw_dec_setcaps (GstALawDec * alawdec, GstCaps * caps)
{
- GstALawDec *alawdec;
GstStructure *structure;
int rate, channels;
gboolean ret;
GstCaps *outcaps;
- alawdec = GST_ALAW_DEC (GST_PAD_PARENT (pad));
-
structure = gst_caps_get_structure (caps, 0);
ret = gst_structure_get_int (structure, "rate", &rate);
if (!ret)
return FALSE;
- outcaps = gst_caps_new_simple ("audio/x-raw-int",
- "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16,
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, TRUE,
+ outcaps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (S16),
"rate", G_TYPE_INT, rate, "channels", G_TYPE_INT, channels, NULL);
ret = gst_pad_set_caps (alawdec->srcpad, outcaps);
}
static GstCaps *
-gst_alaw_dec_getcaps (GstPad * pad)
+gst_alaw_dec_getcaps (GstPad * pad, GstCaps * filter)
{
GstALawDec *alawdec;
GstPad *otherpad;
/* figure out the name of the caps we are going to return */
if (pad == alawdec->srcpad) {
- name = "audio/x-raw-int";
+ name = "audio/x-raw";
otherpad = alawdec->sinkpad;
} else {
name = "audio/x-alaw";
otherpad = alawdec->srcpad;
}
/* get caps from the peer, this can return NULL when there is no peer */
- othercaps = gst_pad_peer_get_caps (otherpad);
+ othercaps = gst_pad_peer_query_caps (otherpad, NULL);
/* get the template caps to make sure we return something acceptable */
templ = gst_pad_get_pad_template_caps (pad);
if (pad == alawdec->sinkpad) {
/* remove the fields we don't want */
- gst_structure_remove_fields (structure, "width", "depth", "endianness",
- "signed", NULL);
+ gst_structure_remove_fields (structure, "format", NULL);
} else {
/* add fixed fields */
- gst_structure_set (structure, "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16,
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, TRUE, NULL);
+ gst_structure_set (structure, "format", G_TYPE_STRING,
+ GST_AUDIO_NE (S16), NULL);
}
}
/* filter against the allowed caps of the pad to return our result */
/* there was no peer, return the template caps */
result = gst_caps_copy (templ);
}
+ if (filter && result) {
+ GstCaps *temp;
+ temp = gst_caps_intersect (result, filter);
+ gst_caps_unref (result);
+ result = temp;
+ }
return result;
}
-static void
-gst_alaw_dec_base_init (gpointer klass)
+static gboolean
+gst_alaw_dec_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ gboolean res;
- gst_element_class_add_static_pad_template (element_class,
- &alaw_dec_src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &alaw_dec_sink_factory);
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
- gst_element_class_set_details_simple (element_class, "A Law audio decoder",
- "Codec/Decoder/Audio", "Convert 8bit A law to 16bit PCM",
- "Zaheer Abbas Merali <zaheerabbas at merali dot org>");
+ gst_query_parse_caps (query, &filter);
+ caps = gst_alaw_dec_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
- GST_DEBUG_CATEGORY_INIT (alaw_dec_debug, "alawdec", 0, "A Law audio decoder");
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
}
static void
gst_alaw_dec_class_init (GstALawDecClass * klass)
{
- GstElementClass *element_class = (GstElementClass *) klass;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&alaw_dec_src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&alaw_dec_sink_factory));
+
+ gst_element_class_set_details_simple (element_class, "A Law audio decoder",
+ "Codec/Decoder/Audio", "Convert 8bit A law to 16bit PCM",
+ "Zaheer Abbas Merali <zaheerabbas at merali dot org>");
element_class->change_state = GST_DEBUG_FUNCPTR (gst_alaw_dec_change_state);
+
+ GST_DEBUG_CATEGORY_INIT (alaw_dec_debug, "alawdec", 0, "A Law audio decoder");
}
static void
-gst_alaw_dec_init (GstALawDec * alawdec, GstALawDecClass * klass)
+gst_alaw_dec_init (GstALawDec * alawdec)
{
alawdec->sinkpad =
gst_pad_new_from_static_template (&alaw_dec_sink_factory, "sink");
- gst_pad_set_setcaps_function (alawdec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_alaw_dec_sink_setcaps));
- gst_pad_set_getcaps_function (alawdec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_alaw_dec_getcaps));
+ gst_pad_set_query_function (alawdec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_alaw_dec_query));
+ gst_pad_set_event_function (alawdec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_alaw_dec_event));
gst_pad_set_chain_function (alawdec->sinkpad,
GST_DEBUG_FUNCPTR (gst_alaw_dec_chain));
gst_element_add_pad (GST_ELEMENT (alawdec), alawdec->sinkpad);
alawdec->srcpad =
gst_pad_new_from_static_template (&alaw_dec_src_factory, "src");
gst_pad_use_fixed_caps (alawdec->srcpad);
- gst_pad_set_getcaps_function (alawdec->srcpad,
- GST_DEBUG_FUNCPTR (gst_alaw_dec_getcaps));
+ gst_pad_set_query_function (alawdec->srcpad,
+ GST_DEBUG_FUNCPTR (gst_alaw_dec_query));
gst_element_add_pad (GST_ELEMENT (alawdec), alawdec->srcpad);
}
+static gboolean
+gst_alaw_dec_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstALawDec *alawdec;
+ gboolean res;
+
+ alawdec = GST_ALAW_DEC (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ gst_alaw_dec_setcaps (alawdec, caps);
+ gst_event_unref (event);
+
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+ return res;
+}
+
static GstFlowReturn
-gst_alaw_dec_chain (GstPad * pad, GstBuffer * buffer)
+gst_alaw_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstALawDec *alawdec;
+ GstMapInfo inmap, outmap;
gint16 *linear_data;
guint8 *alaw_data;
- guint alaw_size;
+ gsize alaw_size;
GstBuffer *outbuf;
gint i;
GstFlowReturn ret;
- alawdec = GST_ALAW_DEC (GST_PAD_PARENT (pad));
+ alawdec = GST_ALAW_DEC (parent);
if (G_UNLIKELY (alawdec->rate == 0))
goto not_negotiated;
GST_LOG_OBJECT (alawdec, "buffer with ts=%" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
- alaw_data = GST_BUFFER_DATA (buffer);
- alaw_size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &inmap, GST_MAP_READ);
+ alaw_data = inmap.data;
+ alaw_size = inmap.size;
- ret =
- gst_pad_alloc_buffer_and_set_caps (alawdec->srcpad,
- GST_BUFFER_OFFSET_NONE, alaw_size * 2, GST_PAD_CAPS (alawdec->srcpad),
- &outbuf);
- if (ret != GST_FLOW_OK)
- goto alloc_failed;
+ outbuf = gst_buffer_new_allocate (NULL, alaw_size * 2, 0);
- linear_data = (gint16 *) GST_BUFFER_DATA (outbuf);
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+ linear_data = (gint16 *) outmap.data;
/* copy discont flag */
if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buffer);
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (alawdec->srcpad));
for (i = 0; i < alaw_size; i++) {
linear_data[i] = alaw_to_s16 (alaw_data[i]);
}
+
+ gst_buffer_unmap (outbuf, &outmap);
+ gst_buffer_unmap (buffer, &inmap);
gst_buffer_unref (buffer);
ret = gst_pad_push (alawdec->srcpad, outbuf);
GST_WARNING_OBJECT (alawdec, "no input format set: not-negotiated");
return GST_FLOW_NOT_NEGOTIATED;
}
-alloc_failed:
- {
- gst_buffer_unref (buffer);
- GST_DEBUG_OBJECT (alawdec, "pad alloc failed, flow: %s",
- gst_flow_get_name (ret));
- return ret;
- }
}
static GstStateChangeReturn
#include "config.h"
#endif
+#include <gst/audio/audio.h>
#include "alaw-encode.h"
GST_DEBUG_CATEGORY_STATIC (alaw_enc_debug);
extern GstStaticPadTemplate alaw_enc_src_factory;
extern GstStaticPadTemplate alaw_enc_sink_factory;
-static GstFlowReturn gst_alaw_enc_chain (GstPad * pad, GstBuffer * buffer);
+static gboolean gst_alaw_enc_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_alaw_enc_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
-GST_BOILERPLATE (GstALawEnc, gst_alaw_enc, GstElement, GST_TYPE_ELEMENT);
+G_DEFINE_TYPE (GstALawEnc, gst_alaw_enc, GST_TYPE_ELEMENT);
/* some day we might have defines in gstconfig.h that tell us about the
* desired cpu/memory/binary size trade-offs */
#endif /* GST_ALAW_ENC_USE_TABLE */
static GstCaps *
-gst_alaw_enc_getcaps (GstPad * pad)
+gst_alaw_enc_getcaps (GstPad * pad, GstCaps * filter)
{
GstALawEnc *alawenc;
GstPad *otherpad;
name = "audio/x-alaw";
otherpad = alawenc->sinkpad;
} else {
- name = "audio/x-raw-int";
+ name = "audio/x-raw";
otherpad = alawenc->srcpad;
}
/* get caps from the peer, this can return NULL when there is no peer */
- othercaps = gst_pad_peer_get_caps (otherpad);
+ othercaps = gst_pad_peer_query_caps (otherpad, NULL);
/* get the template caps to make sure we return something acceptable */
templ = gst_pad_get_pad_template_caps (pad);
if (pad == alawenc->srcpad) {
/* remove the fields we don't want */
- gst_structure_remove_fields (structure, "width", "depth", "endianness",
- "signed", NULL);
+ gst_structure_remove_fields (structure, "format", NULL);
} else {
/* add fixed fields */
- gst_structure_set (structure, "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16,
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, TRUE, NULL);
+ gst_structure_set (structure, "format", G_TYPE_STRING,
+ GST_AUDIO_NE (S16), NULL);
}
}
/* filter against the allowed caps of the pad to return our result */
/* there was no peer, return the template caps */
result = gst_caps_copy (templ);
}
+ if (filter && result) {
+ GstCaps *temp;
+
+ temp = gst_caps_intersect (result, filter);
+ gst_caps_unref (result);
+ result = temp;
+ }
return result;
}
static gboolean
-gst_alaw_enc_setcaps (GstPad * pad, GstCaps * caps)
+gst_alaw_enc_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ gboolean res;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_alaw_enc_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
+}
+
+static gboolean
+gst_alaw_enc_setcaps (GstALawEnc * alawenc, GstCaps * caps)
{
- GstALawEnc *alawenc;
- GstPad *otherpad;
GstStructure *structure;
gboolean ret;
GstCaps *base_caps;
- alawenc = GST_ALAW_ENC (GST_PAD_PARENT (pad));
-
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "channels", &alawenc->channels);
gst_structure_get_int (structure, "rate", &alawenc->rate);
- if (pad == alawenc->sinkpad) {
- otherpad = alawenc->srcpad;
- } else {
- otherpad = alawenc->sinkpad;
- }
-
- base_caps = gst_caps_copy (gst_pad_get_pad_template_caps (otherpad));
+ base_caps = gst_caps_copy (gst_pad_get_pad_template_caps (alawenc->srcpad));
structure = gst_caps_get_structure (base_caps, 0);
gst_structure_set (structure, "rate", G_TYPE_INT, alawenc->rate, NULL);
gst_structure_set (structure, "channels", G_TYPE_INT, alawenc->channels,
GST_DEBUG_OBJECT (alawenc, "rate=%d, channels=%d", alawenc->rate,
alawenc->channels);
- ret = gst_pad_set_caps (otherpad, base_caps);
+ ret = gst_pad_set_caps (alawenc->srcpad, base_caps);
gst_caps_unref (base_caps);
}
static void
-gst_alaw_enc_base_init (gpointer klass)
+gst_alaw_enc_class_init (GstALawEncClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class,
- &alaw_enc_src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &alaw_enc_sink_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&alaw_enc_src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&alaw_enc_sink_factory));
gst_element_class_set_details_simple (element_class,
"A Law audio encoder", "Codec/Encoder/Audio",
}
static void
-gst_alaw_enc_class_init (GstALawEncClass * klass)
-{
- /* nothing to do here for now */
-}
-
-static void
-gst_alaw_enc_init (GstALawEnc * alawenc, GstALawEncClass * klass)
+gst_alaw_enc_init (GstALawEnc * alawenc)
{
alawenc->sinkpad =
gst_pad_new_from_static_template (&alaw_enc_sink_factory, "sink");
- gst_pad_set_setcaps_function (alawenc->sinkpad,
- GST_DEBUG_FUNCPTR (gst_alaw_enc_setcaps));
- gst_pad_set_getcaps_function (alawenc->sinkpad,
- GST_DEBUG_FUNCPTR (gst_alaw_enc_getcaps));
+ gst_pad_set_query_function (alawenc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_alaw_enc_query));
+ gst_pad_set_event_function (alawenc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_alaw_enc_event));
gst_pad_set_chain_function (alawenc->sinkpad,
GST_DEBUG_FUNCPTR (gst_alaw_enc_chain));
gst_element_add_pad (GST_ELEMENT (alawenc), alawenc->sinkpad);
alawenc->srcpad =
gst_pad_new_from_static_template (&alaw_enc_src_factory, "src");
- gst_pad_set_setcaps_function (alawenc->srcpad,
- GST_DEBUG_FUNCPTR (gst_alaw_enc_setcaps));
- gst_pad_set_getcaps_function (alawenc->srcpad,
- GST_DEBUG_FUNCPTR (gst_alaw_enc_getcaps));
+ gst_pad_set_query_function (alawenc->srcpad,
+ GST_DEBUG_FUNCPTR (gst_alaw_enc_query));
gst_pad_use_fixed_caps (alawenc->srcpad);
gst_element_add_pad (GST_ELEMENT (alawenc), alawenc->srcpad);
alawenc->rate = 0;
}
+static gboolean
+gst_alaw_enc_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstALawEnc *alawenc;
+ gboolean res;
+
+ alawenc = GST_ALAW_ENC (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ gst_alaw_enc_setcaps (alawenc, caps);
+ gst_event_unref (event);
+
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+ return res;
+}
+
static GstFlowReturn
-gst_alaw_enc_chain (GstPad * pad, GstBuffer * buffer)
+gst_alaw_enc_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstALawEnc *alawenc;
+ GstMapInfo inmap, outmap;
gint16 *linear_data;
- guint linear_size;
+ gsize linear_size;
guint8 *alaw_data;
guint alaw_size;
GstBuffer *outbuf;
GstFlowReturn ret;
GstClockTime timestamp, duration;
- alawenc = GST_ALAW_ENC (GST_PAD_PARENT (pad));
+ alawenc = GST_ALAW_ENC (parent);
if (G_UNLIKELY (alawenc->rate == 0 || alawenc->channels == 0))
goto not_negotiated;
- linear_data = (gint16 *) GST_BUFFER_DATA (buffer);
- linear_size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &inmap, GST_MAP_READ);
+ linear_data = (gint16 *) inmap.data;
+ linear_size = inmap.size;
alaw_size = linear_size / 2;
GST_LOG_OBJECT (alawenc, "buffer with ts=%" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
- ret =
- gst_pad_alloc_buffer_and_set_caps (alawenc->srcpad,
- GST_BUFFER_OFFSET_NONE, alaw_size, GST_PAD_CAPS (alawenc->srcpad),
- &outbuf);
- if (ret != GST_FLOW_OK)
- goto done;
+ outbuf = gst_buffer_new_allocate (NULL, alaw_size, 0);
if (duration == GST_CLOCK_TIME_NONE) {
duration = gst_util_uint64_scale_int (alaw_size,
GST_SECOND, alawenc->rate * alawenc->channels);
}
- if (GST_BUFFER_SIZE (outbuf) < alaw_size) {
- /* pad-alloc can return a smaller buffer */
- gst_buffer_unref (outbuf);
- outbuf = gst_buffer_new_and_alloc (alaw_size);
- }
-
- alaw_data = (guint8 *) GST_BUFFER_DATA (outbuf);
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+ alaw_data = outmap.data;
+ alaw_size = outmap.size;
/* copy discont flag */
if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_DURATION (outbuf) = duration;
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (alawenc->srcpad));
-
for (i = 0; i < alaw_size; i++) {
alaw_data[i] = s16_to_alaw (linear_data[i]);
}
- ret = gst_pad_push (alawenc->srcpad, outbuf);
-
-done:
-
+ gst_buffer_unmap (outbuf, &outmap);
+ gst_buffer_unmap (buffer, &inmap);
gst_buffer_unref (buffer);
+ ret = gst_pad_push (alawenc->srcpad, outbuf);
+
return ret;
not_negotiated:
{
- ret = GST_FLOW_NOT_NEGOTIATED;
- goto done;
+ gst_buffer_unref (buffer);
+ return GST_FLOW_NOT_NEGOTIATED;
}
}
#include "config.h"
#endif
+#include <gst/audio/audio.h>
+
#include "alaw-encode.h"
#include "alaw-decode.h"
GstStaticPadTemplate alaw_dec_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "rate = (int) [ 8000, 192000 ], "
- "channels = (int) [ 1, 2 ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (boolean) True")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "rate = (int) [ 8000, 192000 ], " "channels = (int) [ 1, 2 ]")
);
GstStaticPadTemplate alaw_dec_sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GstStaticPadTemplate alaw_enc_sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "rate = (int) [ 8000, 192000 ], "
- "channels = (int) [ 1, 2 ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (boolean) True")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "rate = (int) [ 8000, 192000 ], " "channels = (int) [ 1, 2 ]")
);
GstStaticPadTemplate alaw_enc_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
#include "config.h"
#endif
#include <gst/gst.h>
+#include <gst/audio/audio.h>
+
#include "mulaw-decode.h"
#include "mulaw-conversion.h"
ARG_0
};
-static void gst_mulawdec_class_init (GstMuLawDecClass * klass);
-static void gst_mulawdec_base_init (GstMuLawDecClass * klass);
-static void gst_mulawdec_init (GstMuLawDec * mulawdec);
static GstStateChangeReturn
gst_mulawdec_change_state (GstElement * element, GstStateChange transition);
-static GstFlowReturn gst_mulawdec_chain (GstPad * pad, GstBuffer * buffer);
+static gboolean gst_mulawdec_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_mulawdec_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
-static GstElementClass *parent_class = NULL;
+#define gst_mulawdec_parent_class parent_class
+G_DEFINE_TYPE (GstMuLawDec, gst_mulawdec, GST_TYPE_ELEMENT);
static gboolean
-mulawdec_sink_setcaps (GstPad * pad, GstCaps * caps)
+mulawdec_setcaps (GstMuLawDec * mulawdec, GstCaps * caps)
{
- GstMuLawDec *mulawdec;
GstStructure *structure;
int rate, channels;
gboolean ret;
GstCaps *outcaps;
- mulawdec = GST_MULAWDEC (GST_PAD_PARENT (pad));
-
structure = gst_caps_get_structure (caps, 0);
ret = gst_structure_get_int (structure, "rate", &rate);
ret = ret && gst_structure_get_int (structure, "channels", &channels);
if (!ret)
return FALSE;
- outcaps = gst_caps_new_simple ("audio/x-raw-int",
- "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16,
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, TRUE,
+ outcaps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (S16),
"rate", G_TYPE_INT, rate, "channels", G_TYPE_INT, channels, NULL);
ret = gst_pad_set_caps (mulawdec->srcpad, outcaps);
gst_caps_unref (outcaps);
}
static GstCaps *
-mulawdec_getcaps (GstPad * pad)
+mulawdec_getcaps (GstPad * pad, GstCaps * filter)
{
GstMuLawDec *mulawdec;
GstPad *otherpad;
/* figure out the name of the caps we are going to return */
if (pad == mulawdec->srcpad) {
- name = "audio/x-raw-int";
+ name = "audio/x-raw";
otherpad = mulawdec->sinkpad;
} else {
name = "audio/x-mulaw";
otherpad = mulawdec->srcpad;
}
/* get caps from the peer, this can return NULL when there is no peer */
- othercaps = gst_pad_peer_get_caps (otherpad);
+ othercaps = gst_pad_peer_query_caps (otherpad, NULL);
/* get the template caps to make sure we return something acceptable */
templ = gst_pad_get_pad_template_caps (pad);
if (pad == mulawdec->sinkpad) {
/* remove the fields we don't want */
- gst_structure_remove_fields (structure, "width", "depth", "endianness",
- "signed", NULL);
+ gst_structure_remove_fields (structure, "format", NULL);
} else {
/* add fixed fields */
- gst_structure_set (structure, "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16,
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, TRUE, NULL);
+ gst_structure_set (structure, "format", G_TYPE_STRING,
+ GST_AUDIO_NE (S16), NULL);
}
}
/* filter against the allowed caps of the pad to return our result */
/* there was no peer, return the template caps */
result = gst_caps_copy (templ);
}
+ if (filter && result) {
+ GstCaps *temp;
+
+ temp = gst_caps_intersect (result, filter);
+ gst_caps_unref (result);
+ result = temp;
+ }
return result;
}
-GType
-gst_mulawdec_get_type (void)
+static gboolean
+gst_mulawdec_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
- static GType mulawdec_type = 0;
-
- if (!mulawdec_type) {
- static const GTypeInfo mulawdec_info = {
- sizeof (GstMuLawDecClass),
- (GBaseInitFunc) gst_mulawdec_base_init,
- NULL,
- (GClassInitFunc) gst_mulawdec_class_init,
- NULL,
- NULL,
- sizeof (GstMuLawDec),
- 0,
- (GInstanceInitFunc) gst_mulawdec_init,
- };
-
- mulawdec_type =
- g_type_register_static (GST_TYPE_ELEMENT, "GstMuLawDec", &mulawdec_info,
- 0);
+ gboolean res;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = mulawdec_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
}
- return mulawdec_type;
+ return res;
}
static void
-gst_mulawdec_base_init (GstMuLawDecClass * klass)
+gst_mulawdec_class_init (GstMuLawDecClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *element_class = (GstElementClass *) klass;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&mulaw_dec_src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&mulaw_dec_sink_factory));
- gst_element_class_add_static_pad_template (element_class,
- &mulaw_dec_src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &mulaw_dec_sink_factory);
gst_element_class_set_details_simple (element_class, "Mu Law audio decoder",
"Codec/Decoder/Audio",
"Convert 8bit mu law to 16bit PCM",
"Zaheer Abbas Merali <zaheerabbas at merali dot org>");
-}
-
-static void
-gst_mulawdec_class_init (GstMuLawDecClass * klass)
-{
- GstElementClass *element_class = (GstElementClass *) klass;
-
- parent_class = g_type_class_peek_parent (klass);
element_class->change_state = GST_DEBUG_FUNCPTR (gst_mulawdec_change_state);
}
{
mulawdec->sinkpad =
gst_pad_new_from_static_template (&mulaw_dec_sink_factory, "sink");
- gst_pad_set_setcaps_function (mulawdec->sinkpad, mulawdec_sink_setcaps);
- gst_pad_set_getcaps_function (mulawdec->sinkpad, mulawdec_getcaps);
+ gst_pad_set_query_function (mulawdec->sinkpad, gst_mulawdec_query);
+ gst_pad_set_event_function (mulawdec->sinkpad, gst_mulawdec_event);
gst_pad_set_chain_function (mulawdec->sinkpad, gst_mulawdec_chain);
gst_element_add_pad (GST_ELEMENT (mulawdec), mulawdec->sinkpad);
mulawdec->srcpad =
gst_pad_new_from_static_template (&mulaw_dec_src_factory, "src");
- gst_pad_use_fixed_caps (mulawdec->srcpad);
- gst_pad_set_getcaps_function (mulawdec->srcpad, mulawdec_getcaps);
+ gst_pad_set_query_function (mulawdec->srcpad, gst_mulawdec_query);
gst_element_add_pad (GST_ELEMENT (mulawdec), mulawdec->srcpad);
}
+static gboolean
+gst_mulawdec_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstMuLawDec *mulawdec;
+ gboolean res;
+
+ mulawdec = GST_MULAWDEC (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ mulawdec_setcaps (mulawdec, caps);
+ gst_event_unref (event);
+
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+ return res;
+}
+
static GstFlowReturn
-gst_mulawdec_chain (GstPad * pad, GstBuffer * buffer)
+gst_mulawdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstMuLawDec *mulawdec;
+ GstMapInfo inmap, outmap;
gint16 *linear_data;
guint8 *mulaw_data;
- guint mulaw_size;
+ gsize mulaw_size, linear_size;
GstBuffer *outbuf;
GstFlowReturn ret;
- mulawdec = GST_MULAWDEC (GST_PAD_PARENT (pad));
+ mulawdec = GST_MULAWDEC (parent);
if (G_UNLIKELY (mulawdec->rate == 0))
goto not_negotiated;
- mulaw_data = (guint8 *) GST_BUFFER_DATA (buffer);
- mulaw_size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &inmap, GST_MAP_READ);
+ mulaw_data = inmap.data;
+ mulaw_size = inmap.size;
- ret =
- gst_pad_alloc_buffer_and_set_caps (mulawdec->srcpad,
- GST_BUFFER_OFFSET_NONE, mulaw_size * 2, GST_PAD_CAPS (mulawdec->srcpad),
- &outbuf);
- if (ret != GST_FLOW_OK)
- goto alloc_failed;
+ linear_size = mulaw_size * 2;
- linear_data = (gint16 *) GST_BUFFER_DATA (outbuf);
+ outbuf = gst_buffer_new_allocate (NULL, linear_size, 0);
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+ linear_data = (gint16 *) outmap.data;
/* copy discont flag */
if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
if (GST_BUFFER_DURATION (outbuf) == GST_CLOCK_TIME_NONE)
GST_BUFFER_DURATION (outbuf) = gst_util_uint64_scale_int (GST_SECOND,
- mulaw_size * 2, 2 * mulawdec->rate * mulawdec->channels);
+ linear_size, 2 * mulawdec->rate * mulawdec->channels);
else
GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buffer);
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (mulawdec->srcpad));
mulaw_decode (mulaw_data, linear_data, mulaw_size);
+ gst_buffer_unmap (outbuf, &outmap);
+ gst_buffer_unmap (buffer, &inmap);
gst_buffer_unref (buffer);
ret = gst_pad_push (mulawdec->srcpad, outbuf);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_NEGOTIATED;
}
-alloc_failed:
- {
- GST_DEBUG_OBJECT (mulawdec, "pad alloc failed, flow: %s",
- gst_flow_get_name (ret));
- gst_buffer_unref (buffer);
- return ret;
- }
}
static GstStateChangeReturn
#include "config.h"
#endif
#include <gst/gst.h>
+#include <gst/audio/audio.h>
+
#include "mulaw-encode.h"
#include "mulaw-conversion.h"
ARG_0
};
-static void gst_mulawenc_class_init (GstMuLawEncClass * klass);
-static void gst_mulawenc_base_init (GstMuLawEncClass * klass);
-static void gst_mulawenc_init (GstMuLawEnc * mulawenc);
-
-static GstFlowReturn gst_mulawenc_chain (GstPad * pad, GstBuffer * buffer);
+static gboolean gst_mulawenc_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_mulawenc_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
-static GstElementClass *parent_class = NULL;
+#define gst_mulawenc_parent_class parent_class
+G_DEFINE_TYPE (GstMuLawEnc, gst_mulawenc, GST_TYPE_ELEMENT);
/*static guint gst_stereo_signals[LAST_SIGNAL] = { 0 }; */
static GstCaps *
-mulawenc_getcaps (GstPad * pad)
+mulawenc_getcaps (GstPad * pad, GstCaps * filter)
{
GstMuLawEnc *mulawenc;
GstPad *otherpad;
name = "audio/x-mulaw";
otherpad = mulawenc->sinkpad;
} else {
- name = "audio/x-raw-int";
+ name = "audio/x-raw";
otherpad = mulawenc->srcpad;
}
/* get caps from the peer, this can return NULL when there is no peer */
- othercaps = gst_pad_peer_get_caps (otherpad);
+ othercaps = gst_pad_peer_query_caps (otherpad, NULL);
/* get the template caps to make sure we return something acceptable */
templ = gst_pad_get_pad_template_caps (pad);
if (pad == mulawenc->srcpad) {
/* remove the fields we don't want */
- gst_structure_remove_fields (structure, "width", "depth", "endianness",
- "signed", NULL);
+ gst_structure_remove_fields (structure, "format", NULL);
} else {
/* add fixed fields */
- gst_structure_set (structure, "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16,
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, TRUE, NULL);
+ gst_structure_set (structure, "format", G_TYPE_STRING,
+ GST_AUDIO_NE (S16), NULL);
}
}
/* filter against the allowed caps of the pad to return our result */
/* there was no peer, return the template caps */
result = gst_caps_copy (templ);
}
+ if (filter && result) {
+ GstCaps *temp;
+
+ temp = gst_caps_intersect (result, filter);
+ gst_caps_unref (result);
+ result = temp;
+ }
+
return result;
}
static gboolean
-mulawenc_setcaps (GstPad * pad, GstCaps * caps)
+gst_mulawenc_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ gboolean res;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = mulawenc_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
+}
+
+
+static gboolean
+mulawenc_setcaps (GstMuLawEnc * mulawenc, GstCaps * caps)
{
- GstMuLawEnc *mulawenc;
- GstPad *otherpad;
GstStructure *structure;
GstCaps *base_caps;
- mulawenc = GST_MULAWENC (gst_pad_get_parent (pad));
-
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "channels", &mulawenc->channels);
gst_structure_get_int (structure, "rate", &mulawenc->rate);
- if (pad == mulawenc->sinkpad) {
- otherpad = mulawenc->srcpad;
- } else {
- otherpad = mulawenc->sinkpad;
- }
- base_caps = gst_caps_copy (gst_pad_get_pad_template_caps (otherpad));
+ base_caps = gst_caps_copy (gst_pad_get_pad_template_caps (mulawenc->srcpad));
structure = gst_caps_get_structure (base_caps, 0);
gst_structure_set (structure, "rate", G_TYPE_INT, mulawenc->rate, NULL);
gst_structure_set (structure, "channels", G_TYPE_INT, mulawenc->channels,
NULL);
- gst_pad_set_caps (otherpad, base_caps);
+ gst_pad_set_caps (mulawenc->srcpad, base_caps);
- gst_object_unref (mulawenc);
gst_caps_unref (base_caps);
return TRUE;
}
-GType
-gst_mulawenc_get_type (void)
-{
- static GType mulawenc_type = 0;
-
- if (!mulawenc_type) {
- static const GTypeInfo mulawenc_info = {
- sizeof (GstMuLawEncClass),
- (GBaseInitFunc) gst_mulawenc_base_init,
- NULL,
- (GClassInitFunc) gst_mulawenc_class_init,
- NULL,
- NULL,
- sizeof (GstMuLawEnc),
- 0,
- (GInstanceInitFunc) gst_mulawenc_init,
- };
-
- mulawenc_type =
- g_type_register_static (GST_TYPE_ELEMENT, "GstMuLawEnc", &mulawenc_info,
- 0);
- }
- return mulawenc_type;
-}
-
static void
-gst_mulawenc_base_init (GstMuLawEncClass * klass)
+gst_mulawenc_class_init (GstMuLawEncClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class,
- &mulaw_enc_src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &mulaw_enc_sink_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&mulaw_enc_src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&mulaw_enc_sink_factory));
+
gst_element_class_set_details_simple (element_class, "Mu Law audio encoder",
"Codec/Encoder/Audio",
"Convert 16bit PCM to 8bit mu law",
}
static void
-gst_mulawenc_class_init (GstMuLawEncClass * klass)
-{
- parent_class = g_type_class_peek_parent (klass);
-}
-
-static void
gst_mulawenc_init (GstMuLawEnc * mulawenc)
{
mulawenc->sinkpad =
gst_pad_new_from_static_template (&mulaw_enc_sink_factory, "sink");
- gst_pad_set_setcaps_function (mulawenc->sinkpad, mulawenc_setcaps);
- gst_pad_set_getcaps_function (mulawenc->sinkpad, mulawenc_getcaps);
+ gst_pad_set_query_function (mulawenc->sinkpad, gst_mulawenc_query);
+ gst_pad_set_event_function (mulawenc->sinkpad, gst_mulawenc_event);
gst_pad_set_chain_function (mulawenc->sinkpad, gst_mulawenc_chain);
gst_element_add_pad (GST_ELEMENT (mulawenc), mulawenc->sinkpad);
mulawenc->srcpad =
gst_pad_new_from_static_template (&mulaw_enc_src_factory, "src");
- gst_pad_set_setcaps_function (mulawenc->srcpad, mulawenc_setcaps);
- gst_pad_set_getcaps_function (mulawenc->srcpad, mulawenc_getcaps);
- gst_pad_use_fixed_caps (mulawenc->srcpad);
+ gst_pad_set_query_function (mulawenc->srcpad, gst_mulawenc_query);
gst_element_add_pad (GST_ELEMENT (mulawenc), mulawenc->srcpad);
/* init rest */
mulawenc->rate = 0;
}
+static gboolean
+gst_mulawenc_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstMuLawEnc *mulawenc;
+ gboolean res;
+
+ mulawenc = GST_MULAWENC (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ mulawenc_setcaps (mulawenc, caps);
+ gst_event_unref (event);
+
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+ return res;
+}
+
static GstFlowReturn
-gst_mulawenc_chain (GstPad * pad, GstBuffer * buffer)
+gst_mulawenc_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstMuLawEnc *mulawenc;
+ GstMapInfo inmap, outmap;
gint16 *linear_data;
- guint linear_size;
+ gsize linear_size;
guint8 *mulaw_data;
guint mulaw_size;
GstBuffer *outbuf;
GstFlowReturn ret;
GstClockTime timestamp, duration;
- mulawenc = GST_MULAWENC (gst_pad_get_parent (pad));
+ mulawenc = GST_MULAWENC (parent);
if (!mulawenc->rate || !mulawenc->channels)
goto not_negotiated;
- linear_data = (gint16 *) GST_BUFFER_DATA (buffer);
- linear_size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &inmap, GST_MAP_READ);
+ linear_data = (gint16 *) inmap.data;
+ linear_size = inmap.size;
mulaw_size = linear_size / 2;
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
- ret = gst_pad_alloc_buffer_and_set_caps (mulawenc->srcpad,
- GST_BUFFER_OFFSET_NONE, mulaw_size, GST_PAD_CAPS (mulawenc->srcpad),
- &outbuf);
- if (ret != GST_FLOW_OK)
- goto alloc_failed;
+ outbuf = gst_buffer_new_allocate (NULL, mulaw_size, 0);
if (duration == -1) {
duration = gst_util_uint64_scale_int (mulaw_size,
GST_SECOND, mulawenc->rate * mulawenc->channels);
}
- if (GST_BUFFER_SIZE (outbuf) < mulaw_size) {
- /* pad-alloc can suggest a smaller size */
- gst_buffer_unref (outbuf);
- outbuf = gst_buffer_new_and_alloc (mulaw_size);
- }
-
- mulaw_data = (guint8 *) GST_BUFFER_DATA (outbuf);
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+ mulaw_data = outmap.data;
/* copy discont flag */
if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_DURATION (outbuf) = duration;
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (mulawenc->srcpad));
-
mulaw_encode (linear_data, mulaw_data, mulaw_size);
+ gst_buffer_unmap (outbuf, &outmap);
+ gst_buffer_unmap (buffer, &inmap);
gst_buffer_unref (buffer);
ret = gst_pad_push (mulawenc->srcpad, outbuf);
done:
- gst_object_unref (mulawenc);
return ret;
gst_buffer_unref (buffer);
goto done;
}
-alloc_failed:
- {
- GST_DEBUG_OBJECT (mulawenc, "pad alloc failed");
- gst_buffer_unref (buffer);
- goto done;
- }
}
#include "mulaw-encode.h"
#include "mulaw-decode.h"
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define INT_FORMAT "S16LE"
+#else
+#define INT_FORMAT "S16BE"
+#endif
+
GstStaticPadTemplate mulaw_dec_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "rate = (int) [ 8000, 192000 ], "
- "channels = (int) [ 1, 2 ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (boolean) True")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " INT_FORMAT ", "
+ "rate = (int) [ 8000, 192000 ], " "channels = (int) [ 1, 2 ]")
);
GstStaticPadTemplate mulaw_dec_sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GstStaticPadTemplate mulaw_enc_sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "rate = (int) [ 8000, 192000 ], "
- "channels = (int) [ 1, 2 ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (boolean) True")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " INT_FORMAT ", "
+ "rate = (int) [ 8000, 192000 ], " "channels = (int) [ 1, 2 ]")
);
GstStaticPadTemplate mulaw_enc_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
libgstlevel_la_SOURCES = gstlevel.c
libgstlevel_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
-libgstlevel_la_LIBADD = $(GST_BASE_LIBS) $(LIBM)
+libgstlevel_la_LIBADD = $(GST_BASE_LIBS) $(LIBM) -lgstaudio-$(GST_MAJORMINOR)
libgstlevel_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstlevel_la_LIBTOOLFLAGS = --tag=disable-static
#define EPSILON 1e-35f
static GstStaticPadTemplate sink_template_factory =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) { 8, 16, 32 }, "
- "depth = (int) { 8, 16, 32 }, "
- "signed = (boolean) true; "
- "audio/x-raw-float, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, " "width = (int) {32, 64} ")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { S8, " GST_AUDIO_NE (S16) ", " GST_AUDIO_NE (S32)
+ GST_AUDIO_NE (F32) "," GST_AUDIO_NE (F64) " },"
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
);
static GstStaticPadTemplate src_template_factory =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) { 8, 16, 32 }, "
- "depth = (int) { 8, 16, 32 }, "
- "signed = (boolean) true; "
- "audio/x-raw-float, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, " "width = (int) {32, 64} ")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { S8, " GST_AUDIO_NE (S16) ", " GST_AUDIO_NE (S32)
+ GST_AUDIO_NE (F32) "," GST_AUDIO_NE (F64) " },"
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
);
enum
PROP_PEAK_FALLOFF
};
-GST_BOILERPLATE (GstLevel, gst_level, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
+#define gst_level_parent_class parent_class
+G_DEFINE_TYPE (GstLevel, gst_level, GST_TYPE_BASE_TRANSFORM);
static void gst_level_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void
-gst_level_base_init (gpointer g_class)
-{
- GstElementClass *element_class = g_class;
-
- gst_element_class_add_static_pad_template (element_class,
- &sink_template_factory);
- gst_element_class_add_static_pad_template (element_class,
- &src_template_factory);
- gst_element_class_set_details_simple (element_class, "Level",
- "Filter/Analyzer/Audio",
- "RMS/Peak/Decaying Peak Level messager for audio/raw",
- "Thomas Vander Stichele <thomas at apestaart dot org>");
-}
-
-static void
gst_level_class_init (GstLevelClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
gobject_class->set_property = gst_level_set_property;
GST_DEBUG_CATEGORY_INIT (level_debug, "level", 0, "Level calculation");
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template_factory));
+ gst_element_class_set_details_simple (element_class, "Level",
+ "Filter/Analyzer/Audio",
+ "RMS/Peak/Decaying Peak Level messager for audio/raw",
+ "Thomas Vander Stichele <thomas at apestaart dot org>");
+
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_level_set_caps);
trans_class->start = GST_DEBUG_FUNCPTR (gst_level_start);
trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_level_transform_ip);
}
static void
-gst_level_init (GstLevel * filter, GstLevelClass * g_class)
+gst_level_init (GstLevel * filter)
{
filter->CS = NULL;
filter->peak = NULL;
- filter->rate = 0;
- filter->width = 0;
- filter->channels = 0;
+ gst_audio_info_init (&filter->info);
filter->interval = GST_SECOND / 10;
filter->decay_peak_ttl = GST_SECOND / 10 * 3;
break;
case PROP_SIGNAL_INTERVAL:
filter->interval = g_value_get_uint64 (value);
- if (filter->rate) {
+ if (GST_AUDIO_INFO_RATE (&filter->info)) {
filter->interval_frames =
- GST_CLOCK_TIME_TO_FRAMES (filter->interval, filter->rate);
+ GST_CLOCK_TIME_TO_FRAMES (filter->interval,
+ GST_AUDIO_INFO_RATE (&filter->info));
}
break;
case PROP_PEAK_TTL:
*/
-static gint
-structure_get_int (GstStructure * structure, const gchar * field)
-{
- gint ret;
-
- if (!gst_structure_get_int (structure, field, &ret))
- g_assert_not_reached ();
-
- return ret;
-}
-
static gboolean
gst_level_set_caps (GstBaseTransform * trans, GstCaps * in, GstCaps * out)
{
GstLevel *filter = GST_LEVEL (trans);
- const gchar *mimetype;
- GstStructure *structure;
- gint i;
+ GstAudioInfo info;
+ gint i, channels, rate;
- structure = gst_caps_get_structure (in, 0);
- filter->rate = structure_get_int (structure, "rate");
- filter->width = structure_get_int (structure, "width");
- filter->channels = structure_get_int (structure, "channels");
- mimetype = gst_structure_get_name (structure);
+ if (!gst_audio_info_from_caps (&info, in))
+ return FALSE;
- /* FIXME: set calculator func depending on caps */
- filter->process = NULL;
- if (strcmp (mimetype, "audio/x-raw-int") == 0) {
- GST_DEBUG_OBJECT (filter, "use int: %u", filter->width);
- switch (filter->width) {
- case 8:
- filter->process = gst_level_calculate_gint8;
- break;
- case 16:
- filter->process = gst_level_calculate_gint16;
- break;
- case 32:
- filter->process = gst_level_calculate_gint32;
- break;
- }
- } else if (strcmp (mimetype, "audio/x-raw-float") == 0) {
- GST_DEBUG_OBJECT (filter, "use float, %u", filter->width);
- switch (filter->width) {
- case 32:
- filter->process = gst_level_calculate_gfloat;
- break;
- case 64:
- filter->process = gst_level_calculate_gdouble;
- break;
- }
+ switch (GST_AUDIO_INFO_FORMAT (&info)) {
+ case GST_AUDIO_FORMAT_S8:
+ filter->process = gst_level_calculate_gint8;
+ break;
+ case GST_AUDIO_FORMAT_S16:
+ filter->process = gst_level_calculate_gint16;
+ break;
+ case GST_AUDIO_FORMAT_S32:
+ filter->process = gst_level_calculate_gint32;
+ break;
+ case GST_AUDIO_FORMAT_F32:
+ filter->process = gst_level_calculate_gfloat;
+ break;
+ case GST_AUDIO_FORMAT_F64:
+ filter->process = gst_level_calculate_gdouble;
+ break;
+ default:
+ filter->process = NULL;
+ break;
}
+ filter->info = info;
+
+ channels = GST_AUDIO_INFO_CHANNELS (&info);
+ rate = GST_AUDIO_INFO_RATE (&info);
+
/* allocate channel variable arrays */
g_free (filter->CS);
g_free (filter->peak);
g_free (filter->decay_peak);
g_free (filter->decay_peak_base);
g_free (filter->decay_peak_age);
- filter->CS = g_new (gdouble, filter->channels);
- filter->peak = g_new (gdouble, filter->channels);
- filter->last_peak = g_new (gdouble, filter->channels);
- filter->decay_peak = g_new (gdouble, filter->channels);
- filter->decay_peak_base = g_new (gdouble, filter->channels);
+ filter->CS = g_new (gdouble, channels);
+ filter->peak = g_new (gdouble, channels);
+ filter->last_peak = g_new (gdouble, channels);
+ filter->decay_peak = g_new (gdouble, channels);
+ filter->decay_peak_base = g_new (gdouble, channels);
- filter->decay_peak_age = g_new (GstClockTime, filter->channels);
+ filter->decay_peak_age = g_new (GstClockTime, channels);
- for (i = 0; i < filter->channels; ++i) {
+ for (i = 0; i < channels; ++i) {
filter->CS[i] = filter->peak[i] = filter->last_peak[i] =
filter->decay_peak[i] = filter->decay_peak_base[i] = 0.0;
filter->decay_peak_age[i] = G_GUINT64_CONSTANT (0);
}
- filter->interval_frames =
- GST_CLOCK_TIME_TO_FRAMES (filter->interval, filter->rate);
+ filter->interval_frames = GST_CLOCK_TIME_TO_FRAMES (filter->interval, rate);
return TRUE;
}
gst_level_transform_ip (GstBaseTransform * trans, GstBuffer * in)
{
GstLevel *filter;
+ GstMapInfo map;
guint8 *in_data;
+ gsize in_size;
gdouble CS;
guint i;
guint num_frames = 0;
guint num_int_samples = 0; /* number of interleaved samples
* ie. total count for all channels combined */
GstClockTimeDiff falloff_time;
+ gint channels, rate, bps;
filter = GST_LEVEL (trans);
- in_data = GST_BUFFER_DATA (in);
- num_int_samples = GST_BUFFER_SIZE (in) / (filter->width / 8);
+ channels = GST_AUDIO_INFO_CHANNELS (&filter->info);
+ bps = GST_AUDIO_INFO_BPS (&filter->info);
+ rate = GST_AUDIO_INFO_RATE (&filter->info);
+
+ gst_buffer_map (in, &map, GST_MAP_READ);
+ in_data = map.data;
+ in_size = map.size;
+
+ num_int_samples = in_size / bps;
GST_LOG_OBJECT (filter, "analyzing %u sample frames at ts %" GST_TIME_FORMAT,
num_int_samples, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (in)));
- g_return_val_if_fail (num_int_samples % filter->channels == 0,
- GST_FLOW_ERROR);
+ g_return_val_if_fail (num_int_samples % channels == 0, GST_FLOW_ERROR);
- num_frames = num_int_samples / filter->channels;
+ num_frames = num_int_samples / channels;
- for (i = 0; i < filter->channels; ++i) {
+ for (i = 0; i < channels; ++i) {
if (!GST_BUFFER_FLAG_IS_SET (in, GST_BUFFER_FLAG_GAP)) {
- filter->process (in_data, num_int_samples, filter->channels, &CS,
+ filter->process (in_data, num_int_samples, channels, &CS,
&filter->peak[i]);
GST_LOG_OBJECT (filter,
"channel %d, cumulative sum %f, peak %f, over %d samples/%d channels",
- i, CS, filter->peak[i], num_int_samples, filter->channels);
+ i, CS, filter->peak[i], num_int_samples, channels);
filter->CS[i] += CS;
} else {
filter->peak[i] = 0.0;
}
- in_data += (filter->width / 8);
+ in_data += bps;
- filter->decay_peak_age[i] +=
- GST_FRAMES_TO_CLOCK_TIME (num_frames, filter->rate);
+ filter->decay_peak_age[i] += GST_FRAMES_TO_CLOCK_TIME (num_frames, rate);
GST_LOG_OBJECT (filter, "filter peak info [%d]: decay peak %f, age %"
GST_TIME_FORMAT, i,
filter->decay_peak[i], GST_TIME_ARGS (filter->decay_peak_age[i]));
if (filter->message) {
GstMessage *m;
GstClockTime duration =
- GST_FRAMES_TO_CLOCK_TIME (filter->num_frames, filter->rate);
+ GST_FRAMES_TO_CLOCK_TIME (filter->num_frames, rate);
m = gst_level_message_new (filter, filter->message_ts, duration);
"message: ts %" GST_TIME_FORMAT ", num_frames %d",
GST_TIME_ARGS (filter->message_ts), filter->num_frames);
- for (i = 0; i < filter->channels; ++i) {
+ for (i = 0; i < channels; ++i) {
gdouble RMS;
gdouble RMSdB, lastdB, decaydB;
filter->num_frames = 0;
}
+ gst_buffer_unmap (in, &map);
+
return GST_FLOW_OK;
}
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
-
+#include <gst/audio/audio.h>
G_BEGIN_DECLS
gboolean message; /* whether or not to post messages */
guint64 interval; /* how many seconds between emits */
- gint rate; /* caps variables */
- gint width;
- gint channels;
+ GstAudioInfo info;
gdouble decay_peak_ttl; /* time to live for peak in seconds */
gdouble decay_peak_falloff; /* falloff in dB/sec */
gdouble *MS; /* normalized Mean Square of buffer */
gdouble *RMS_dB; /* RMS in dB to emit */
GstClockTime *decay_peak_age; /* age of last peak */
-
+
void (*process)(gpointer, guint, guint, gdouble*, gdouble*);
};
/* Peeks following element id and element length in datastream provided
* by @peek with @ctx as user data.
- * Returns GST_FLOW_UNEXPECTED if not enough data to read id and length.
+ * Returns GST_FLOW_EOS if not enough data to read id and length.
* Otherwise, @needed provides the prefix length (id + length), and
* @length provides element length.
*
ebml->el = el;
ebml->offset = offset;
ebml->buf = buf;
+ gst_buffer_map (buf, &ebml->map, GST_MAP_READ);
ebml->readers = g_array_sized_new (FALSE, FALSE, sizeof (GstEbmlMaster), 10);
m.offset = ebml->offset;
- gst_byte_reader_init (&m.br, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ gst_byte_reader_init (&m.br, ebml->map.data, ebml->map.size);
g_array_append_val (ebml->readers, m);
}
if (ebml->readers)
g_array_free (ebml->readers, TRUE);
ebml->readers = NULL;
- if (ebml->buf)
+ if (ebml->buf) {
+ gst_buffer_unmap (ebml->buf, &ebml->map);
gst_buffer_unref (ebml->buf);
+ }
ebml->buf = NULL;
ebml->el = NULL;
}
if (G_LIKELY (gst_byte_reader_peek_data (br, peek, data)))
return GST_FLOW_OK;
else
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
static GstFlowReturn
offset = gst_ebml_read_get_pos (ebml) - ebml->offset;
if (G_LIKELY (gst_byte_reader_skip (gst_ebml_read_br (ebml), length))) {
- *buf = gst_buffer_create_sub (ebml->buf, offset, length);
+ *buf = gst_buffer_copy_region (ebml->buf, GST_BUFFER_COPY_ALL,
+ offset, length);
} else {
*buf = NULL;
return GST_FLOW_PARSE;
GstBuffer *buf;
guint64 offset;
+ GstMapInfo map;
GArray *readers;
} GstEbmlRead;
GST_DEBUG_CATEGORY_STATIC (gst_ebml_write_debug);
#define GST_CAT_DEFAULT gst_ebml_write_debug
-#define _do_init(thing) \
+#define _do_init \
GST_DEBUG_CATEGORY_INIT (gst_ebml_write_debug, "ebmlwrite", 0, "Write EBML structured data")
-GST_BOILERPLATE_FULL (GstEbmlWrite, gst_ebml_write, GstObject, GST_TYPE_OBJECT,
+#define parent_class gst_ebml_write_parent_class
+G_DEFINE_TYPE_WITH_CODE (GstEbmlWrite, gst_ebml_write, GST_TYPE_OBJECT,
_do_init);
static void gst_ebml_write_finalize (GObject * object);
static void
-gst_ebml_write_base_init (gpointer g_class)
-{
-}
-
-static void
gst_ebml_write_class_init (GstEbmlWriteClass * klass)
{
GObjectClass *object = G_OBJECT_CLASS (klass);
}
static void
-gst_ebml_write_init (GstEbmlWrite * ebml, GstEbmlWriteClass * klass)
+gst_ebml_write_init (GstEbmlWrite * ebml)
{
ebml->srcpad = NULL;
ebml->pos = 0;
- ebml->last_pos = G_MAXUINT64; /* force newsegment event */
+ ebml->last_pos = G_MAXUINT64; /* force segment event */
ebml->cache = NULL;
ebml->streamheader = NULL;
ebml->caps = NULL;
}
- GST_CALL_PARENT (G_OBJECT_CLASS, finalize, (object));
+ G_OBJECT_CLASS (parent_class)->finalize (object);
}
gst_ebml_write_reset (GstEbmlWrite * ebml)
{
ebml->pos = 0;
- ebml->last_pos = G_MAXUINT64; /* force newsegment event */
+ ebml->last_pos = G_MAXUINT64; /* force segment event */
if (ebml->cache) {
gst_byte_writer_free (ebml->cache);
buffer = gst_byte_writer_free_and_get_buffer (ebml->streamheader);
ebml->streamheader = NULL;
- GST_DEBUG ("Streamheader was size %d", GST_BUFFER_SIZE (buffer));
+ GST_DEBUG ("Streamheader was size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buffer));
ebml->writing_streamheader = FALSE;
return buffer;
}
static gboolean
-gst_ebml_writer_send_new_segment_event (GstEbmlWrite * ebml, guint64 new_pos)
+gst_ebml_writer_send_segment_event (GstEbmlWrite * ebml, guint64 new_pos)
{
+ GstSegment segment;
gboolean res;
GST_INFO ("seeking to %" G_GUINT64_FORMAT, new_pos);
- res = gst_pad_push_event (ebml->srcpad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, new_pos, -1, 0));
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = new_pos;
+ segment.stop = -1;
+ segment.position = 0;
+
+ res = gst_pad_push_event (ebml->srcpad, gst_event_new_segment (&segment));
if (!res)
GST_WARNING ("seek to %" G_GUINT64_FORMAT "failed", new_pos);
buffer = gst_byte_writer_free_and_get_buffer (ebml->cache);
ebml->cache = NULL;
- GST_DEBUG ("Flushing cache of size %d", GST_BUFFER_SIZE (buffer));
- gst_buffer_set_caps (buffer, ebml->caps);
+ GST_DEBUG ("Flushing cache of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buffer));
GST_BUFFER_TIMESTAMP (buffer) = timestamp;
- GST_BUFFER_OFFSET (buffer) = ebml->pos - GST_BUFFER_SIZE (buffer);
+ GST_BUFFER_OFFSET (buffer) = ebml->pos - gst_buffer_get_size (buffer);
GST_BUFFER_OFFSET_END (buffer) = ebml->pos;
if (ebml->last_write_result == GST_FLOW_OK) {
if (GST_BUFFER_OFFSET (buffer) != ebml->last_pos) {
- gst_ebml_writer_send_new_segment_event (ebml, GST_BUFFER_OFFSET (buffer));
+ gst_ebml_writer_send_segment_event (ebml, GST_BUFFER_OFFSET (buffer));
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
}
if (ebml->writing_streamheader) {
- GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_HEADER);
}
if (!is_keyframe) {
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
* Returns: A new #GstBuffer.
*/
static GstBuffer *
-gst_ebml_write_element_new (GstEbmlWrite * ebml, guint8 ** data_out, guint size)
+gst_ebml_write_element_new (GstEbmlWrite * ebml, GstMapInfo * map, guint size)
{
/* Create new buffer of size + ID + length */
GstBuffer *buf;
buf = gst_buffer_new_and_alloc (size);
GST_BUFFER_TIMESTAMP (buf) = ebml->timestamp;
- *data_out = GST_BUFFER_DATA (buf);
+ /* FIXME unmap not possible */
+ gst_buffer_map (buf, map, GST_MAP_WRITE);
return buf;
}
gst_ebml_write_element_push (GstEbmlWrite * ebml, GstBuffer * buf,
guint8 * buf_data, guint8 * buf_data_end)
{
+ GstMapInfo map;
guint data_size;
- if (!buf_data)
- buf_data = GST_BUFFER_DATA (buf);
+ map.data = NULL;
- if (buf_data_end) {
+ if (buf_data_end)
data_size = buf_data_end - buf_data;
- GST_BUFFER_SIZE (buf) = data_size;
- } else {
- data_size = GST_BUFFER_SIZE (buf);
- }
+ else
+ data_size = gst_buffer_get_size (buf);
ebml->pos += data_size;
/* if there's no cache, then don't push it! */
if (ebml->writing_streamheader) {
+ if (!buf_data) {
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ buf_data = map.data;
+ }
gst_byte_writer_put_data (ebml->streamheader, buf_data, data_size);
}
if (ebml->cache) {
+ if (!buf_data) {
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ buf_data = map.data;
+ }
gst_byte_writer_put_data (ebml->cache, buf_data, data_size);
+ if (map.data)
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return;
}
+ if (buf_data && map.data)
+ gst_buffer_unmap (buf, &map);
+
if (ebml->last_write_result == GST_FLOW_OK) {
- buf = gst_buffer_make_metadata_writable (buf);
- gst_buffer_set_caps (buf, ebml->caps);
+ buf = gst_buffer_make_writable (buf);
GST_BUFFER_OFFSET (buf) = ebml->pos - data_size;
GST_BUFFER_OFFSET_END (buf) = ebml->pos;
if (ebml->writing_streamheader) {
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
}
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
if (GST_BUFFER_OFFSET (buf) != ebml->last_pos) {
- gst_ebml_writer_send_new_segment_event (ebml, GST_BUFFER_OFFSET (buf));
+ gst_ebml_writer_send_segment_event (ebml, GST_BUFFER_OFFSET (buf));
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
}
ebml->last_pos = ebml->pos;
GstBuffer *buf;
guint8 *data_start, *data_end;
guint size = gst_ebml_write_get_uint_size (num);
+ GstMapInfo map;
- buf = gst_ebml_write_element_new (ebml, &data_start, sizeof (num));
- data_end = data_start;
+ buf = gst_ebml_write_element_new (ebml, &map, sizeof (num));
+ data_end = data_start = map.data;
/* write */
gst_ebml_write_element_id (&data_end, id);
gst_ebml_write_element_size (&data_end, size);
gst_ebml_write_set_uint (&data_end, num, size);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_set_size (buf, (data_end - data_start));
+
gst_ebml_write_element_push (ebml, buf, data_start, data_end);
}
{
GstBuffer *buf;
guint8 *data_start, *data_end;
+ GstMapInfo map;
+
/* if the signed number is on the edge of a extra-byte,
* then we'll fall over when detecting it. Example: if I
* have a number (-)0x8000 (G_MINSHORT), then my abs()<<1
guint64 unum = (num < 0 ? (-num - 1) << 1 : num << 1);
guint size = gst_ebml_write_get_uint_size (unum);
- buf = gst_ebml_write_element_new (ebml, &data_start, sizeof (num));
- data_end = data_start;
+ buf = gst_ebml_write_element_new (ebml, &map, sizeof (num));
+ data_end = data_start = map.data;
/* make unsigned */
if (num >= 0) {
gst_ebml_write_element_id (&data_end, id);
gst_ebml_write_element_size (&data_end, size);
gst_ebml_write_set_uint (&data_end, unum, size);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_set_size (buf, (data_end - data_start));
+
gst_ebml_write_element_push (ebml, buf, data_start, data_end);
}
gst_ebml_write_float (GstEbmlWrite * ebml, guint32 id, gdouble num)
{
GstBuffer *buf;
+ GstMapInfo map;
guint8 *data_start, *data_end;
- buf = gst_ebml_write_element_new (ebml, &data_start, sizeof (num));
- data_end = data_start;
+ buf = gst_ebml_write_element_new (ebml, &map, sizeof (num));
+ data_end = data_start = map.data;
gst_ebml_write_element_id (&data_end, id);
gst_ebml_write_element_size (&data_end, 8);
num = GDOUBLE_TO_BE (num);
gst_ebml_write_element_data (&data_end, (guint8 *) & num, 8);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_set_size (buf, (data_end - data_start));
+
gst_ebml_write_element_push (ebml, buf, data_start, data_end);
}
{
gint len = strlen (str) + 1; /* add trailing '\0' */
GstBuffer *buf;
+ GstMapInfo map;
guint8 *data_start, *data_end;
- buf = gst_ebml_write_element_new (ebml, &data_start, len);
- data_end = data_start;
+ buf = gst_ebml_write_element_new (ebml, &map, len);
+ data_end = data_start = map.data;
gst_ebml_write_element_id (&data_end, id);
gst_ebml_write_element_size (&data_end, len);
gst_ebml_write_element_data (&data_end, (guint8 *) str, len);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_set_size (buf, (data_end - data_start));
+
gst_ebml_write_element_push (ebml, buf, data_start, data_end);
}
{
guint64 pos = ebml->pos;
GstBuffer *buf;
+ GstMapInfo map;
guint8 *data_start, *data_end;
- buf = gst_ebml_write_element_new (ebml, &data_start, 0);
- data_end = data_start;
+ buf = gst_ebml_write_element_new (ebml, &map, 0);
+ data_end = data_start = map.data;
gst_ebml_write_element_id (&data_end, id);
pos += data_end - data_start;
gst_ebml_write_element_size (&data_end, GST_EBML_SIZE_UNKNOWN);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_set_size (buf, (data_end - data_start));
+
gst_ebml_write_element_push (ebml, buf, data_start, data_end);
return pos;
guint64 extra_size)
{
guint64 pos = ebml->pos;
- GstBuffer *buf;
+ guint8 *data = g_malloc (8);
+ GstBuffer *buf = gst_buffer_new_wrapped (data, 8);
gst_ebml_write_seek (ebml, startpos);
- buf = gst_buffer_new_and_alloc (8);
- GST_WRITE_UINT64_BE (GST_BUFFER_DATA (buf),
+ GST_WRITE_UINT64_BE (data,
(G_GINT64_CONSTANT (1) << 56) | (pos - startpos - 8 + extra_size));
gst_ebml_write_element_push (ebml, buf, NULL, NULL);
guint32 id, guint8 * binary, guint64 length)
{
GstBuffer *buf;
+ GstMapInfo map;
guint8 *data_start, *data_end;
- buf = gst_ebml_write_element_new (ebml, &data_start, length);
- data_end = data_start;
+ buf = gst_ebml_write_element_new (ebml, &map, length);
+ data_end = data_start = map.data;
gst_ebml_write_element_id (&data_end, id);
gst_ebml_write_element_size (&data_end, length);
gst_ebml_write_element_data (&data_end, binary, length);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_set_size (buf, (data_end - data_start));
+
gst_ebml_write_element_push (ebml, buf, data_start, data_end);
}
gst_ebml_write_buffer_header (GstEbmlWrite * ebml, guint32 id, guint64 length)
{
GstBuffer *buf;
+ GstMapInfo map;
guint8 *data_start, *data_end;
- buf = gst_ebml_write_element_new (ebml, &data_start, 0);
- data_end = data_start;
+ buf = gst_ebml_write_element_new (ebml, &map, 0);
+ data_end = data_start = map.data;
gst_ebml_write_element_id (&data_end, id);
gst_ebml_write_element_size (&data_end, length);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_set_size (buf, (data_end - data_start));
+
gst_ebml_write_element_push (ebml, buf, data_start, data_end);
}
gst_ebml_replace_uint (GstEbmlWrite * ebml, guint64 pos, guint64 num)
{
guint64 oldpos = ebml->pos;
- GstBuffer *buf = gst_buffer_new_and_alloc (8);
guint8 *data_start, *data_end;
+ GstBuffer *buf;
- data_start = GST_BUFFER_DATA (buf);
+ data_start = g_malloc (8);
data_end = data_start;
+ buf = gst_buffer_new_wrapped (data_start, 8);
gst_ebml_write_seek (ebml, pos);
gst_ebml_write_set_uint (&data_end, num, 8);
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <math.h>
#include <string.h>
#include <glib/gprintf.h>
#include <gst/riff/riff-ids.h>
#include <gst/riff/riff-media.h>
+#include <gst/audio/audio.h>
#include <gst/tag/tag.h>
-
#include <gst/pbutils/pbutils.h>
#include "matroska-demux.h"
/* TODO: fill in caps! */
static GstStaticPadTemplate audio_src_templ =
-GST_STATIC_PAD_TEMPLATE ("audio_%02d",
+GST_STATIC_PAD_TEMPLATE ("audio_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("ANY")
);
static GstStaticPadTemplate video_src_templ =
-GST_STATIC_PAD_TEMPLATE ("video_%02d",
+GST_STATIC_PAD_TEMPLATE ("video_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("ANY")
);
static GstStaticPadTemplate subtitle_src_templ =
- GST_STATIC_PAD_TEMPLATE ("subtitle_%02d",
+ GST_STATIC_PAD_TEMPLATE ("subtitle_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("text/x-pango-markup; application/x-ssa; "
GstQuery * query);
/* pad functions */
-static gboolean gst_matroska_demux_sink_activate_pull (GstPad * sinkpad,
- gboolean active);
-static gboolean gst_matroska_demux_sink_activate (GstPad * sinkpad);
+static gboolean gst_matroska_demux_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+static gboolean gst_matroska_demux_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
static gboolean gst_matroska_demux_handle_seek_event (GstMatroskaDemux * demux,
GstPad * pad, GstEvent * event);
static gboolean gst_matroska_demux_handle_src_event (GstPad * pad,
- GstEvent * event);
-static const GstQueryType *gst_matroska_demux_get_src_query_types (GstPad *
- pad);
+ GstObject * parent, GstEvent * event);
static gboolean gst_matroska_demux_handle_src_query (GstPad * pad,
- GstQuery * query);
+ GstObject * parent, GstQuery * query);
static gboolean gst_matroska_demux_handle_sink_event (GstPad * pad,
- GstEvent * event);
+ GstObject * parent, GstEvent * event);
static GstFlowReturn gst_matroska_demux_chain (GstPad * pad,
- GstBuffer * buffer);
+ GstObject * object, GstBuffer * buffer);
static GstStateChangeReturn
gst_matroska_demux_change_state (GstElement * element,
GstStateChange transition);
+#if 0
static void
gst_matroska_demux_set_index (GstElement * element, GstIndex * index);
static GstIndex *gst_matroska_demux_get_index (GstElement * element);
+#endif
/* caps functions */
static GstCaps *gst_matroska_demux_video_caps (GstMatroskaTrackVideoContext
guint prop_id, GValue * value, GParamSpec * pspec);
GType gst_matroska_demux_get_type (void);
-GST_BOILERPLATE (GstMatroskaDemux, gst_matroska_demux, GstElement,
- GST_TYPE_ELEMENT);
-
-static void
-gst_matroska_demux_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class, &video_src_templ);
- gst_element_class_add_static_pad_template (element_class, &audio_src_templ);
- gst_element_class_add_static_pad_template (element_class,
- &subtitle_src_templ);
- gst_element_class_add_static_pad_template (element_class, &sink_templ);
-
- gst_element_class_set_details_simple (element_class, "Matroska demuxer",
- "Codec/Demuxer",
- "Demuxes Matroska/WebM streams into video/audio/subtitles",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-}
+#define parent_class gst_matroska_demux_parent_class
+G_DEFINE_TYPE (GstMatroskaDemux, gst_matroska_demux, GST_TYPE_ELEMENT);
static void
gst_matroska_demux_finalize (GObject * object)
g_object_class_install_property (gobject_class, ARG_MAX_GAP_TIME,
g_param_spec_uint64 ("max-gap-time", "Maximum gap time",
- "The demuxer sends out newsegment events for skipping "
+ "The demuxer sends out segment events for skipping "
"gaps longer than this (0 = disabled).", 0, G_MAXUINT64,
DEFAULT_MAX_GAP_TIME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
GST_DEBUG_FUNCPTR (gst_matroska_demux_element_send_event);
gstelement_class->query =
GST_DEBUG_FUNCPTR (gst_matroska_demux_element_query);
-
+#if 0
gstelement_class->set_index =
GST_DEBUG_FUNCPTR (gst_matroska_demux_set_index);
gstelement_class->get_index =
GST_DEBUG_FUNCPTR (gst_matroska_demux_get_index);
+#endif
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&video_src_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&audio_src_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&subtitle_src_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_templ));
+
+ gst_element_class_set_details_simple (gstelement_class, "Matroska demuxer",
+ "Codec/Demuxer",
+ "Demuxes Matroska/WebM streams into video/audio/subtitles",
+ "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
}
static void
-gst_matroska_demux_init (GstMatroskaDemux * demux,
- GstMatroskaDemuxClass * klass)
+gst_matroska_demux_init (GstMatroskaDemux * demux)
{
demux->common.sinkpad = gst_pad_new_from_static_template (&sink_templ,
"sink");
gst_pad_set_activate_function (demux->common.sinkpad,
GST_DEBUG_FUNCPTR (gst_matroska_demux_sink_activate));
- gst_pad_set_activatepull_function (demux->common.sinkpad,
- GST_DEBUG_FUNCPTR (gst_matroska_demux_sink_activate_pull));
+ gst_pad_set_activatemode_function (demux->common.sinkpad,
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_sink_activate_mode));
gst_pad_set_chain_function (demux->common.sinkpad,
GST_DEBUG_FUNCPTR (gst_matroska_demux_chain));
gst_pad_set_event_function (demux->common.sinkpad,
/* property defaults */
demux->max_gap_time = DEFAULT_MAX_GAP_TIME;
+ GST_OBJECT_FLAG_SET (demux, GST_ELEMENT_FLAG_INDEXABLE);
+
/* finish off */
gst_matroska_demux_reset (GST_ELEMENT (demux));
}
demux->next_cluster_offset = 0;
demux->index_offset = 0;
demux->seekable = FALSE;
- demux->need_newsegment = FALSE;
+ demux->need_segment = FALSE;
demux->building_index = FALSE;
if (demux->seek_event) {
gst_event_unref (demux->seek_event);
demux->seek_index = NULL;
demux->seek_entry = 0;
- if (demux->close_segment) {
- gst_event_unref (demux->close_segment);
- demux->close_segment = NULL;
- }
-
if (demux->new_segment) {
gst_event_unref (demux->new_segment);
demux->new_segment = NULL;
}
-
+#if 0
if (demux->common.element_index) {
gst_object_unref (demux->common.element_index);
demux->common.element_index = NULL;
}
demux->common.element_index_writer_id = -1;
+#endif
if (demux->common.global_tags) {
gst_tag_list_free (demux->common.global_tags);
}
- demux->common.global_tags = gst_tag_list_new ();
+ demux->common.global_tags = gst_tag_list_new_empty ();
if (demux->common.cached_buffer) {
+ if (demux->common.cached_data) {
+ gst_buffer_unmap (demux->common.cached_buffer, &demux->common.cached_map);
+ demux->common.cached_data = NULL;
+ }
gst_buffer_unref (demux->common.cached_buffer);
demux->common.cached_buffer = NULL;
}
static GstBuffer *
gst_matroska_decode_buffer (GstMatroskaTrackContext * context, GstBuffer * buf)
{
- guint8 *data;
- guint size;
- GstBuffer *new_buf;
+ GstMapInfo map;
+ gpointer data;
+ gsize size;
g_return_val_if_fail (GST_IS_BUFFER (buf), NULL);
GST_DEBUG ("decoding buffer %p", buf);
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
- g_return_val_if_fail (data != NULL && size > 0, buf);
+ g_return_val_if_fail (size > 0, buf);
if (gst_matroska_decode_data (context->encodings, &data, &size,
GST_MATROSKA_TRACK_ENCODING_SCOPE_FRAME, FALSE)) {
- new_buf = gst_buffer_new ();
- GST_BUFFER_MALLOCDATA (new_buf) = (guint8 *) data;
- GST_BUFFER_DATA (new_buf) = (guint8 *) data;
- GST_BUFFER_SIZE (new_buf) = size;
-
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
- buf = new_buf;
-
- return buf;
+ return gst_buffer_new_wrapped (data, size);
} else {
GST_DEBUG ("decode data failed");
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return NULL;
}
}
if (context->type == 0 || context->codec_id == NULL || (ret != GST_FLOW_OK
- && ret != GST_FLOW_UNEXPECTED)) {
- if (ret == GST_FLOW_OK || ret == GST_FLOW_UNEXPECTED)
+ && ret != GST_FLOW_EOS)) {
+ if (ret == GST_FLOW_OK || ret == GST_FLOW_EOS)
GST_WARNING_OBJECT (ebml, "Unknown stream/codec in track entry header");
demux->common.num_streams--;
GstMatroskaTrackVideoContext *videocontext =
(GstMatroskaTrackVideoContext *) context;
- padname = g_strdup_printf ("video_%02d", demux->num_v_streams++);
- templ = gst_element_class_get_pad_template (klass, "video_%02d");
+ padname = g_strdup_printf ("video_%u", demux->num_v_streams++);
+ templ = gst_element_class_get_pad_template (klass, "video_%u");
caps = gst_matroska_demux_video_caps (videocontext,
- context->codec_id, (guint8 *) context->codec_priv,
+ context->codec_id, context->codec_priv,
context->codec_priv_size, &codec, &riff_fourcc);
if (codec) {
- list = gst_tag_list_new ();
- gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
- GST_TAG_VIDEO_CODEC, codec, NULL);
+ list = gst_tag_list_new (GST_TAG_VIDEO_CODEC, codec, NULL);
g_free (codec);
}
break;
GstMatroskaTrackAudioContext *audiocontext =
(GstMatroskaTrackAudioContext *) context;
- padname = g_strdup_printf ("audio_%02d", demux->num_a_streams++);
- templ = gst_element_class_get_pad_template (klass, "audio_%02d");
+ padname = g_strdup_printf ("audio_%u", demux->num_a_streams++);
+ templ = gst_element_class_get_pad_template (klass, "audio_%u");
caps = gst_matroska_demux_audio_caps (audiocontext,
context->codec_id, context->codec_priv, context->codec_priv_size,
&codec, &riff_audio_fmt);
if (codec) {
- list = gst_tag_list_new ();
- gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
- GST_TAG_AUDIO_CODEC, codec, NULL);
+ list = gst_tag_list_new (GST_TAG_AUDIO_CODEC, codec, NULL);
g_free (codec);
}
break;
GstMatroskaTrackSubtitleContext *subtitlecontext =
(GstMatroskaTrackSubtitleContext *) context;
- padname = g_strdup_printf ("subtitle_%02d", demux->num_t_streams++);
- templ = gst_element_class_get_pad_template (klass, "subtitle_%02d");
+ padname = g_strdup_printf ("subtitle_%u", demux->num_t_streams++);
+ templ = gst_element_class_get_pad_template (klass, "subtitle_%u");
caps = gst_matroska_demux_subtitle_caps (subtitlecontext,
context->codec_id, context->codec_priv, context->codec_priv_size);
break;
const gchar *lang;
if (!list)
- list = gst_tag_list_new ();
+ list = gst_tag_list_new_empty ();
/* Matroska contains ISO 639-2B codes, we want ISO 639-1 */
lang = gst_tag_get_language_code (context->language);
"codec_id='%s'", context->codec_id);
switch (context->type) {
case GST_MATROSKA_TRACK_TYPE_VIDEO:
- caps = gst_caps_new_simple ("video/x-unknown", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-unknown");
break;
case GST_MATROSKA_TRACK_TYPE_AUDIO:
- caps = gst_caps_new_simple ("audio/x-unknown", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-unknown");
break;
case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
- caps = gst_caps_new_simple ("application/x-subtitle-unknown", NULL);
+ caps = gst_caps_new_empty_simple ("application/x-subtitle-unknown");
break;
case GST_MATROSKA_TRACK_TYPE_COMPLEX:
default:
- caps = gst_caps_new_simple ("application/x-matroska-unknown", NULL);
+ caps = gst_caps_new_empty_simple ("application/x-matroska-unknown");
break;
}
gst_caps_set_simple (caps, "codec-id", G_TYPE_STRING, context->codec_id,
/* add any unrecognised riff fourcc / audio format, but after codec-id */
if (context->type == GST_MATROSKA_TRACK_TYPE_AUDIO && riff_audio_fmt != 0)
gst_caps_set_simple (caps, "format", G_TYPE_INT, riff_audio_fmt, NULL);
- else if (context->type == GST_MATROSKA_TRACK_TYPE_VIDEO && riff_fourcc != 0)
- gst_caps_set_simple (caps, "fourcc", GST_TYPE_FOURCC, riff_fourcc, NULL);
+ else if (context->type == GST_MATROSKA_TRACK_TYPE_VIDEO && riff_fourcc != 0) {
+ gchar *fstr = g_strdup_printf ("%" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (riff_fourcc));
+ gst_caps_set_simple (caps, "fourcc", G_TYPE_STRING, fstr, NULL);
+ g_free (fstr);
+ }
}
/* the pad in here */
gst_pad_set_event_function (context->pad,
GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_src_event));
- gst_pad_set_query_type_function (context->pad,
- GST_DEBUG_FUNCPTR (gst_matroska_demux_get_src_query_types));
gst_pad_set_query_function (context->pad,
GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_src_query));
gst_pad_set_element_private (context->pad, context);
gst_pad_use_fixed_caps (context->pad);
- gst_pad_set_caps (context->pad, context->caps);
gst_pad_set_active (context->pad, TRUE);
+ gst_pad_set_caps (context->pad, context->caps);
gst_element_add_pad (GST_ELEMENT (demux), context->pad);
g_free (padname);
return ret;
}
-static const GstQueryType *
-gst_matroska_demux_get_src_query_types (GstPad * pad)
-{
- static const GstQueryType query_types[] = {
- GST_QUERY_POSITION,
- GST_QUERY_DURATION,
- GST_QUERY_SEEKING,
- 0
- };
-
- return query_types;
-}
-
static gboolean
gst_matroska_demux_query (GstMatroskaDemux * demux, GstPad * pad,
GstQuery * query)
demux->stream_start_time);
else
gst_query_set_position (query, GST_FORMAT_TIME,
- MAX (demux->common.segment.last_stop, demux->stream_start_time) -
+ MAX (demux->common.segment.position, demux->stream_start_time) -
demux->stream_start_time);
GST_OBJECT_UNLOCK (demux);
} else if (format == GST_FORMAT_DEFAULT && context
break;
}
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, (GstObject *) demux, query);
break;
}
}
static gboolean
-gst_matroska_demux_handle_src_query (GstPad * pad, GstQuery * query)
+gst_matroska_demux_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
- gboolean ret;
- GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (gst_pad_get_parent (pad));
-
- ret = gst_matroska_demux_query (demux, pad, query);
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
- gst_object_unref (demux);
-
- return ret;
+ return gst_matroska_demux_query (demux, pad, query);
}
/* returns FALSE if there are no pads to deliver event to,
static gboolean
gst_matroska_demux_send_event (GstMatroskaDemux * demux, GstEvent * event)
{
- gboolean is_newsegment;
+ gboolean is_segment;
gboolean ret = FALSE;
gint i;
GST_DEBUG_OBJECT (demux, "Sending event of type %s to all source pads",
GST_EVENT_TYPE_NAME (event));
- is_newsegment = (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT);
+ is_segment = (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT);
g_assert (demux->common.src->len == demux->common.num_streams);
for (i = 0; i < demux->common.src->len; i++) {
ret = TRUE;
/* FIXME: send global tags before stream tags */
- if (G_UNLIKELY (is_newsegment && stream->pending_tags != NULL)) {
+ if (G_UNLIKELY (is_segment && stream->pending_tags != NULL)) {
GST_DEBUG_OBJECT (demux, "Sending pending_tags %p for pad %s:%s : %"
GST_PTR_FORMAT, stream->pending_tags,
GST_DEBUG_PAD_NAME (stream->pad), stream->pending_tags);
- gst_element_found_tags_for_pad (GST_ELEMENT (demux), stream->pad,
- stream->pending_tags);
+ gst_pad_push_event (stream->pad,
+ gst_event_new_tag (stream->pending_tags));
stream->pending_tags = NULL;
}
}
- if (G_UNLIKELY (is_newsegment && demux->common.global_tags != NULL)) {
+ if (G_UNLIKELY (is_segment && demux->common.global_tags != NULL)) {
+ GstEvent *tag_event;
gst_tag_list_add (demux->common.global_tags, GST_TAG_MERGE_REPLACE,
GST_TAG_CONTAINER_FORMAT, "Matroska", NULL);
GST_DEBUG_OBJECT (demux, "Sending global_tags %p : %" GST_PTR_FORMAT,
demux->common.global_tags, demux->common.global_tags);
- gst_element_found_tags (GST_ELEMENT (demux), demux->common.global_tags);
+
+ tag_event = gst_event_new_tag (demux->common.global_tags);
+
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *stream;
+
+ stream = g_ptr_array_index (demux->common.src, i);
+ gst_pad_push_event (stream->pad, gst_event_ref (tag_event));
+ }
+
+ gst_event_unref (tag_event);
demux->common.global_tags = NULL;
}
/* update the time */
gst_matroska_read_common_reset_streams (&demux->common, entry->time, TRUE);
- demux->common.segment.last_stop = entry->time;
+ demux->common.segment.position = entry->time;
demux->seek_block = entry->block;
demux->seek_first = TRUE;
demux->last_stop_end = GST_CLOCK_TIME_NONE;
GstFlowReturn ret = GST_FLOW_OK;
const guint chunk = 64 * 1024;
GstBuffer *buf = NULL;
+ GstMapInfo map;
+ gpointer data = NULL;
+ gsize size;
guint64 length;
guint32 id;
guint needed;
gint cluster_pos;
if (buf != NULL) {
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
buf = NULL;
}
ret = gst_pad_pull_range (demux->common.sinkpad, newpos, chunk, &buf);
if (ret != GST_FLOW_OK)
break;
- GST_DEBUG_OBJECT (demux, "read buffer size %d at offset %" G_GINT64_FORMAT,
- GST_BUFFER_SIZE (buf), newpos);
- gst_byte_reader_init_from_buffer (&reader, buf);
+ GST_DEBUG_OBJECT (demux,
+ "read buffer size %" G_GSIZE_FORMAT " at offset %" G_GINT64_FORMAT,
+ gst_buffer_get_size (buf), newpos);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+ gst_byte_reader_init (&reader, data, size);
resume:
cluster_pos = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
GST_MATROSKA_ID_CLUSTER, 0, gst_byte_reader_get_remaining (&reader));
}
if (buf) {
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
buf = NULL;
}
/* estimate using start and current position */
GST_OBJECT_LOCK (demux);
opos = demux->common.offset - demux->common.ebml_segment_start;
- otime = demux->common.segment.last_stop;
+ otime = demux->common.segment.position;
GST_OBJECT_UNLOCK (demux);
/* sanitize */
while (1) {
ret = gst_matroska_demux_search_cluster (demux, &newpos);
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
/* heuristic HACK */
newpos = startpos * 80 / 100;
GST_DEBUG_OBJECT (demux, "EOS; "
continue;
error:
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
if (prev_cluster_time != GST_CLOCK_TIME_NONE)
break;
}
* we might be playing a file that's still being recorded
* so, invalidate our current duration, which is only a moving target,
* and should not be used to clamp anything */
- if (!demux->streaming && !demux->common.index &&
- demux->invalid_duration) {
- gst_segment_set_duration (&seeksegment, GST_FORMAT_TIME,
- GST_CLOCK_TIME_NONE);
+ if (!demux->streaming && !demux->common.index && demux->invalid_duration) {
+ seeksegment.duration = GST_CLOCK_TIME_NONE;
}
if (event) {
GST_DEBUG_OBJECT (demux, "configuring seek");
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
/* compensate for clip start time */
if (GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)) {
- seeksegment.last_stop += demux->stream_start_time;
+ seeksegment.position += demux->stream_start_time;
seeksegment.start += demux->stream_start_time;
if (GST_CLOCK_TIME_IS_VALID (seeksegment.stop))
seeksegment.stop += demux->stream_start_time;
/* restore segment duration (if any effect),
* would be determined again when parsing, but anyway ... */
- gst_segment_set_duration (&seeksegment, GST_FORMAT_TIME,
- demux->common.segment.duration);
+ seeksegment.duration = demux->common.segment.duration;
flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
keyunit = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
GST_OBJECT_LOCK (demux);
track = gst_matroska_read_common_get_seek_track (&demux->common, track);
if ((entry = gst_matroska_read_common_do_index_seek (&demux->common, track,
- seeksegment.last_stop, &demux->seek_index, &demux->seek_entry)) ==
+ seeksegment.position, &demux->seek_index, &demux->seek_entry)) ==
NULL) {
/* pull mode without index can scan later on */
if (demux->streaming) {
if (demux->streaming) {
/* need to seek to cluster start to pick up cluster time */
/* upstream takes care of flushing and all that
- * ... and newsegment event handling takes care of the rest */
+ * ... and segment event handling takes care of the rest */
return perform_seek_to_offset (demux,
entry->pos + demux->common.ebml_segment_start);
}
if (!demux->streaming && !entry) {
/* need to stop flushing upstream as we need it next */
if (flush)
- gst_pad_push_event (demux->common.sinkpad, gst_event_new_flush_stop ());
- entry = gst_matroska_demux_search_pos (demux, seeksegment.last_stop);
+ gst_pad_push_event (demux->common.sinkpad,
+ gst_event_new_flush_stop (TRUE));
+ entry = gst_matroska_demux_search_pos (demux, seeksegment.position);
/* keep local copy */
if (entry) {
scan_entry = *entry;
} else {
GST_DEBUG_OBJECT (demux, "Scan failed to find matching position");
if (flush)
- gst_matroska_demux_send_event (demux, gst_event_new_flush_stop ());
+ gst_matroska_demux_send_event (demux, gst_event_new_flush_stop (TRUE));
goto seek_error;
}
}
GST_DEBUG_OBJECT (demux, "seek to key unit, adjusting segment start to %"
GST_TIME_FORMAT, GST_TIME_ARGS (entry->time));
seeksegment.start = MAX (entry->time, demux->stream_start_time);
- seeksegment.last_stop = seeksegment.start;
+ seeksegment.position = seeksegment.start;
seeksegment.time = seeksegment.start - demux->stream_start_time;
}
exit:
if (flush) {
GST_DEBUG_OBJECT (demux, "Stopping flush");
- gst_pad_push_event (demux->common.sinkpad, gst_event_new_flush_stop ());
- gst_matroska_demux_send_event (demux, gst_event_new_flush_stop ());
- } else if (demux->segment_running && update) {
- GST_DEBUG_OBJECT (demux, "Closing currently running segment");
-
- GST_OBJECT_LOCK (demux);
- if (demux->close_segment)
- gst_event_unref (demux->close_segment);
-
- demux->close_segment = gst_event_new_new_segment (TRUE,
- demux->common.segment.rate, GST_FORMAT_TIME,
- demux->common.segment.start, demux->common.segment.last_stop,
- demux->common.segment.time);
- GST_OBJECT_UNLOCK (demux);
+ gst_pad_push_event (demux->common.sinkpad, gst_event_new_flush_stop (TRUE));
+ gst_matroska_demux_send_event (demux, gst_event_new_flush_stop (TRUE));
}
GST_OBJECT_LOCK (demux);
GST_OBJECT_LOCK (demux);
if (demux->new_segment)
gst_event_unref (demux->new_segment);
- demux->new_segment = gst_event_new_new_segment_full (!update,
- demux->common.segment.rate, demux->common.segment.applied_rate,
- demux->common.segment.format, demux->common.segment.start,
- demux->common.segment.stop, demux->common.segment.time);
+ /* On port from 0.10, discarded !update (for segment.update) here, FIXME? */
+ demux->new_segment = gst_event_new_segment (&demux->common.segment);
GST_OBJECT_UNLOCK (demux);
/* restart our task since it might have been stopped when we did the
* flush. */
- demux->segment_running = TRUE;
gst_pad_start_task (demux->common.sinkpad,
(GstTaskFunction) gst_matroska_demux_loop, demux->common.sinkpad);
}
static gboolean
-gst_matroska_demux_handle_src_event (GstPad * pad, GstEvent * event)
+gst_matroska_demux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
- GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (gst_pad_get_parent (pad));
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
gboolean res = TRUE;
switch (GST_EVENT_TYPE (event)) {
GstClockTimeDiff diff;
GstClockTime timestamp;
- gst_event_parse_qos (event, &proportion, &diff, ×tamp);
+ gst_event_parse_qos (event, NULL, &proportion, &diff, ×tamp);
GST_OBJECT_LOCK (demux);
videocontext->earliest_time = timestamp + diff;
break;
}
- gst_object_unref (demux);
-
return res;
}
static GstFlowReturn
gst_matroska_demux_seek_to_previous_keyframe (GstMatroskaDemux * demux)
{
- GstFlowReturn ret = GST_FLOW_UNEXPECTED;
+ GstFlowReturn ret = GST_FLOW_EOS;
gboolean done = TRUE;
gint i;
- g_return_val_if_fail (demux->seek_index, GST_FLOW_UNEXPECTED);
+ g_return_val_if_fail (demux->seek_index, GST_FLOW_EOS);
g_return_val_if_fail (demux->seek_entry < demux->seek_index->len,
- GST_FLOW_UNEXPECTED);
+ GST_FLOW_EOS);
GST_DEBUG_OBJECT (demux, "locating previous keyframe");
GST_OBJECT_LOCK (demux);
GST_LOG_OBJECT (demux, "Sync to %" GST_TIME_FORMAT,
- GST_TIME_ARGS (demux->common.segment.last_stop));
+ GST_TIME_ARGS (demux->common.segment.position));
g_assert (demux->common.num_streams == demux->common.src->len);
for (stream_nr = 0; stream_nr < demux->common.src->len; stream_nr++) {
/* does it lag? 0.5 seconds is a random threshold...
* lag need only be considered if we have advanced into requested segment */
if (GST_CLOCK_TIME_IS_VALID (context->pos) &&
- GST_CLOCK_TIME_IS_VALID (demux->common.segment.last_stop) &&
- demux->common.segment.last_stop > demux->common.segment.start &&
- context->pos + (GST_SECOND / 2) < demux->common.segment.last_stop) {
+ GST_CLOCK_TIME_IS_VALID (demux->common.segment.position) &&
+ demux->common.segment.position > demux->common.segment.start &&
+ context->pos + (GST_SECOND / 2) < demux->common.segment.position) {
gint64 new_start;
+ GstSegment segment;
GstEvent *event;
- new_start = demux->common.segment.last_stop - (GST_SECOND / 2);
+ new_start = demux->common.segment.position - (GST_SECOND / 2);
if (GST_CLOCK_TIME_IS_VALID (demux->common.segment.stop))
new_start = MIN (new_start, demux->common.segment.stop);
GST_DEBUG_OBJECT (demux,
context->pos = new_start;
/* advance stream time */
- event = gst_event_new_new_segment (TRUE, demux->common.segment.rate,
- demux->common.segment.format, new_start, demux->common.segment.stop,
- new_start);
+ segment = demux->common.segment;
+ segment.start = new_start;
+ segment.position = new_start;
+ event = gst_event_new_segment (&segment);
GST_OBJECT_UNLOCK (demux);
gst_pad_push_event (context->pad, event);
GST_OBJECT_LOCK (demux);
GstFlowReturn ret, cret;
GstBuffer *header_buf;
- header_buf = gst_buffer_new_and_alloc (len);
- gst_buffer_set_caps (header_buf, stream->caps);
- memcpy (GST_BUFFER_DATA (header_buf), data, len);
+ header_buf = gst_buffer_new_wrapped (g_memdup (data, len), len);
if (stream->set_discont) {
GST_BUFFER_FLAG_SET (header_buf, GST_BUFFER_FLAG_DISCONT);
guint8 *pdata;
guint off, len;
- GST_LOG_OBJECT (demux, "priv data size = %u", stream->codec_priv_size);
+ GST_LOG_OBJECT (demux, "priv data size = %" G_GSIZE_FORMAT,
+ stream->codec_priv_size);
pdata = (guint8 *) stream->codec_priv;
GstMatroskaTrackContext * stream)
{
GstFlowReturn ret;
- guint8 *pdata;
+ guint8 *pdata = stream->codec_priv;
- GST_LOG_OBJECT (demux, "priv data size = %u", stream->codec_priv_size);
-
- pdata = (guint8 *) stream->codec_priv;
+ GST_LOG_OBJECT (demux, "priv data size = %" G_GSIZE_FORMAT,
+ stream->codec_priv_size);
/* need at least 'fLaC' marker + STREAMINFO metadata block */
if (stream->codec_priv_size < 80) {
GstMatroskaTrackContext * stream)
{
GstFlowReturn ret;
- guint8 *p = (guint8 *) stream->codec_priv;
+ guint8 *p = stream->codec_priv;
gint i, offset, num_packets;
guint *length, last;
/* start of the stream and vorbis audio or theora video, need to
* send the codec_priv data as first three packets */
num_packets = p[0] + 1;
- GST_DEBUG_OBJECT (demux, "%u stream headers, total length=%u bytes",
+ GST_DEBUG_OBJECT (demux,
+ "%u stream headers, total length=%" G_GSIZE_FORMAT " bytes",
(guint) num_packets, stream->codec_priv_size);
length = g_alloca (num_packets * sizeof (guint));
* elsewhere, but for now, only interested in a small part */
/* make sure we have terminating 0 */
- buf = g_strndup ((gchar *) stream->codec_priv, stream->codec_priv_size);
+ buf = g_strndup (stream->codec_priv, stream->codec_priv_size);
/* just locate and parse palette part */
start = strstr (buf, "palette:");
{
guint8 *seq_header;
guint seq_header_len;
- guint32 header;
+ guint32 header, tmp;
if (stream->codec_state) {
seq_header = stream->codec_state;
if (GST_BUFFER_FLAG_IS_SET (*buf, GST_BUFFER_FLAG_DELTA_UNIT))
return GST_FLOW_OK;
- if (GST_BUFFER_SIZE (*buf) < 4)
+ if (gst_buffer_get_size (*buf) < 4)
return GST_FLOW_OK;
- header = GST_READ_UINT32_BE (GST_BUFFER_DATA (*buf));
+ gst_buffer_extract (*buf, 0, &tmp, sizeof (guint32));
+ header = GUINT32_FROM_BE (tmp);
+
/* Sequence start code, if not found prepend */
if (header != 0x000001b3) {
GstBuffer *newbuf;
- newbuf = gst_buffer_new_and_alloc (GST_BUFFER_SIZE (*buf) + seq_header_len);
- gst_buffer_set_caps (newbuf, stream->caps);
-
GST_DEBUG_OBJECT (element, "Prepending MPEG sequence header");
- gst_buffer_copy_metadata (newbuf, *buf, GST_BUFFER_COPY_TIMESTAMPS |
- GST_BUFFER_COPY_FLAGS);
- g_memmove (GST_BUFFER_DATA (newbuf), seq_header, seq_header_len);
- g_memmove (GST_BUFFER_DATA (newbuf) + seq_header_len,
- GST_BUFFER_DATA (*buf), GST_BUFFER_SIZE (*buf));
+
+ newbuf = gst_buffer_new_wrapped (g_memdup (seq_header, seq_header_len),
+ seq_header_len);
+
+ gst_buffer_copy_into (newbuf, *buf, GST_BUFFER_COPY_TIMESTAMPS |
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_MEMORY, 0,
+ gst_buffer_get_size (*buf));
+
gst_buffer_unref (*buf);
*buf = newbuf;
}
GstMatroskaTrackAudioContext *audiocontext =
(GstMatroskaTrackAudioContext *) stream;
GstBuffer *newbuf = NULL;
- guint8 *data;
- guint newlen;
+ GstMapInfo map, outmap;
+ guint8 *buf_data, *data;
Wavpack4Header wvh;
wvh.ck_id[0] = 'w';
wvh.block_index = audiocontext->wvpk_block_index;
if (audiocontext->channels <= 2) {
- guint32 block_samples;
+ guint32 block_samples, tmp;
+ gsize size = gst_buffer_get_size (*buf);
- block_samples = GST_READ_UINT32_LE (GST_BUFFER_DATA (*buf));
+ gst_buffer_extract (*buf, 0, &tmp, sizeof (guint32));
+ block_samples = GUINT32_FROM_LE (tmp);
/* we need to reconstruct the header of the wavpack block */
/* -20 because ck_size is the size of the wavpack block -8
* and lace_size is the size of the wavpack block + 12
* (the three guint32 of the header that already are in the buffer) */
- wvh.ck_size = GST_BUFFER_SIZE (*buf) + sizeof (Wavpack4Header) - 20;
+ wvh.ck_size = size + sizeof (Wavpack4Header) - 20;
/* block_samples, flags and crc are already in the buffer */
- newlen = GST_BUFFER_SIZE (*buf) + sizeof (Wavpack4Header) - 12;
- newbuf = gst_buffer_new_and_alloc (newlen);
- gst_buffer_set_caps (newbuf, stream->caps);
+ newbuf = gst_buffer_new_allocate (NULL, sizeof (Wavpack4Header) - 12, 0);
- data = GST_BUFFER_DATA (newbuf);
+ gst_buffer_map (newbuf, &outmap, GST_MAP_WRITE);
+ data = outmap.data;
data[0] = 'w';
data[1] = 'v';
data[2] = 'p';
GST_WRITE_UINT8 (data + 11, wvh.index_no);
GST_WRITE_UINT32_LE (data + 12, wvh.total_samples);
GST_WRITE_UINT32_LE (data + 16, wvh.block_index);
- g_memmove (data + 20, GST_BUFFER_DATA (*buf), GST_BUFFER_SIZE (*buf));
- gst_buffer_copy_metadata (newbuf, *buf,
- GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS);
+
+ /* Append data from buf: */
+ gst_buffer_copy_into (newbuf, *buf, GST_BUFFER_COPY_TIMESTAMPS |
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_MEMORY, 0, size);
+
gst_buffer_unref (*buf);
*buf = newbuf;
audiocontext->wvpk_block_index += block_samples;
} else {
- guint8 *outdata;
+ guint8 *outdata = NULL;
guint outpos = 0;
- guint size;
+ gsize buf_size, size, out_size = 0;
guint32 block_samples, flags, crc, blocksize;
- data = GST_BUFFER_DATA (*buf);
- size = GST_BUFFER_SIZE (*buf);
+ gst_buffer_map (*buf, &map, GST_MAP_READ);
+ buf_data = map.data;
+ buf_size = map.size;
- if (size < 4) {
+ if (buf_size < 4) {
GST_ERROR_OBJECT (element, "Too small wavpack buffer");
+ gst_buffer_unmap (*buf, &map);
return GST_FLOW_ERROR;
}
+ data = buf_data;
+ size = buf_size;
+
block_samples = GST_READ_UINT32_LE (data);
data += 4;
size -= 4;
if (blocksize == 0 || size < blocksize)
break;
+ g_assert ((newbuf == NULL) == (outdata == NULL));
+
if (newbuf == NULL) {
- newbuf = gst_buffer_new_and_alloc (sizeof (Wavpack4Header) + blocksize);
- gst_buffer_set_caps (newbuf, stream->caps);
+ out_size = sizeof (Wavpack4Header) + blocksize;
+ newbuf = gst_buffer_new_allocate (NULL, out_size, 0);
- gst_buffer_copy_metadata (newbuf, *buf,
- GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS);
+ gst_buffer_copy_into (newbuf, *buf,
+ GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS, 0, -1);
outpos = 0;
- outdata = GST_BUFFER_DATA (newbuf);
+ gst_buffer_map (newbuf, &outmap, GST_MAP_WRITE);
+ outdata = outmap.data;
} else {
- GST_BUFFER_SIZE (newbuf) += sizeof (Wavpack4Header) + blocksize;
- GST_BUFFER_DATA (newbuf) =
- g_realloc (GST_BUFFER_DATA (newbuf), GST_BUFFER_SIZE (newbuf));
- GST_BUFFER_MALLOCDATA (newbuf) = GST_BUFFER_DATA (newbuf);
- outdata = GST_BUFFER_DATA (newbuf);
+ gst_buffer_unmap (newbuf, &outmap);
+ out_size += sizeof (Wavpack4Header) + blocksize;
+ gst_buffer_set_size (newbuf, out_size);
+ gst_buffer_map (newbuf, &outmap, GST_MAP_WRITE);
+ outdata = outmap.data;
}
outdata[outpos] = 'w';
data += blocksize;
size -= blocksize;
}
+ gst_buffer_unmap (*buf, &map);
gst_buffer_unref (*buf);
+
+ if (newbuf)
+ gst_buffer_unmap (newbuf, &outmap);
+
*buf = newbuf;
audiocontext->wvpk_block_index += block_samples;
}
GstMatroskaTrackContext * stream, GstBuffer ** buf)
{
GstMatroskaTrackSubtitleContext *sub_stream;
- const gchar *encoding, *data;
+ const gchar *encoding;
GError *err = NULL;
GstBuffer *newbuf;
gchar *utf8;
- guint size;
+ GstMapInfo map;
sub_stream = (GstMatroskaTrackSubtitleContext *) stream;
- data = (const gchar *) GST_BUFFER_DATA (*buf);
- size = GST_BUFFER_SIZE (*buf);
+ if (!gst_buffer_map (*buf, &map, GST_MAP_READ))
+ return GST_FLOW_OK;
if (!sub_stream->invalid_utf8) {
- if (g_utf8_validate (data, size, NULL)) {
+ if (g_utf8_validate ((gchar *) map.data, map.size, NULL)) {
goto next;
}
GST_WARNING_OBJECT (element, "subtitle stream %d is not valid UTF-8, this "
}
}
- utf8 = g_convert_with_fallback (data, size, "UTF-8", encoding, (char *) "*",
- NULL, NULL, &err);
+ utf8 =
+ g_convert_with_fallback ((gchar *) map.data, map.size, "UTF-8", encoding,
+ (char *) "*", NULL, NULL, &err);
if (err) {
GST_LOG_OBJECT (element, "could not convert string from '%s' to UTF-8: %s",
/* invalid input encoding, fall back to ISO-8859-15 (always succeeds) */
encoding = "ISO-8859-15";
- utf8 = g_convert_with_fallback (data, size, "UTF-8", encoding, (char *) "*",
- NULL, NULL, NULL);
+ utf8 =
+ g_convert_with_fallback ((gchar *) map.data, map.size, "UTF-8",
+ encoding, (char *) "*", NULL, NULL, NULL);
}
GST_LOG_OBJECT (element, "converted subtitle text from %s to UTF-8 %s",
if (utf8 == NULL)
utf8 = g_strdup ("invalid subtitle");
- newbuf = gst_buffer_new ();
- GST_BUFFER_MALLOCDATA (newbuf) = (guint8 *) utf8;
- GST_BUFFER_DATA (newbuf) = (guint8 *) utf8;
- GST_BUFFER_SIZE (newbuf) = strlen (utf8);
- gst_buffer_copy_metadata (newbuf, *buf, GST_BUFFER_COPY_ALL);
+ newbuf = gst_buffer_new_wrapped (utf8, strlen (utf8));
+ gst_buffer_copy_into (newbuf, *buf,
+ GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_META,
+ 0, -1);
+ gst_buffer_unmap (*buf, &map);
gst_buffer_unref (*buf);
*buf = newbuf;
- data = (const gchar *) GST_BUFFER_DATA (*buf);
- size = GST_BUFFER_SIZE (*buf);
+ gst_buffer_map (*buf, &map, GST_MAP_READ);
next:
-
if (sub_stream->check_markup) {
/* caps claim markup text, so we need to escape text,
* except if text is already markup and then needs no further escaping */
sub_stream->seen_markup_tag = sub_stream->seen_markup_tag ||
- gst_matroska_demux_subtitle_chunk_has_tag (element, data);
+ gst_matroska_demux_subtitle_chunk_has_tag (element, (gchar *) map.data);
if (!sub_stream->seen_markup_tag) {
- utf8 = g_markup_escape_text (data, size);
+ utf8 = g_markup_escape_text ((gchar *) map.data, map.size);
- newbuf = gst_buffer_new ();
- GST_BUFFER_MALLOCDATA (newbuf) = (guint8 *) utf8;
- GST_BUFFER_DATA (newbuf) = (guint8 *) utf8;
- GST_BUFFER_SIZE (newbuf) = strlen (utf8);
- gst_buffer_copy_metadata (newbuf, *buf, GST_BUFFER_COPY_ALL);
+ newbuf = gst_buffer_new_wrapped (utf8, strlen (utf8));
+ gst_buffer_copy_into (newbuf, *buf,
+ GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS |
+ GST_BUFFER_COPY_META, 0, -1);
+ gst_buffer_unmap (*buf, &map);
gst_buffer_unref (*buf);
*buf = newbuf;
gst_matroska_demux_check_aac (GstElement * element,
GstMatroskaTrackContext * stream, GstBuffer ** buf)
{
- const guint8 *data;
+ guint8 data[2];
guint size;
- data = GST_BUFFER_DATA (*buf);
- size = GST_BUFFER_SIZE (*buf);
+ gst_buffer_extract (*buf, 0, data, 2);
+ size = gst_buffer_get_size (*buf);
if (size > 2 && data[0] == 0xff && (data[1] >> 4 == 0x0f)) {
GstCaps *new_caps;
gst_structure_remove_field (s, "codec_data");
gst_caps_replace (&stream->caps, new_caps);
gst_pad_set_caps (stream->pad, new_caps);
- gst_buffer_set_caps (*buf, new_caps);
GST_DEBUG_OBJECT (element, "ADTS AAC audio data; removing codec-data, "
"new caps: %" GST_PTR_FORMAT, new_caps);
gst_caps_unref (new_caps);
return GST_FLOW_OK;
}
+static GstBuffer *
+gst_matroska_demux_align_buffer (GstMatroskaDemux * demux,
+ GstBuffer * buffer, gsize alignment)
+{
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ if (map.size < sizeof (guintptr)) {
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+ }
+
+ if (((guintptr) map.data) & (alignment - 1)) {
+ GstBuffer *new_buffer;
+
+ new_buffer = gst_buffer_new_allocate (NULL,
+ gst_buffer_get_size (buffer), alignment - 1);
+
+ /* Copy data "by hand", so ensure alignment is kept: */
+ gst_buffer_fill (new_buffer, 0, map.data, map.size);
+
+ gst_buffer_copy_into (new_buffer, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+ GST_DEBUG_OBJECT (demux,
+ "We want output aligned on %" G_GSIZE_FORMAT ", reallocated",
+ alignment);
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+ return new_buffer;
+ }
+
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+}
+
static GstFlowReturn
gst_matroska_demux_parse_blockgroup_or_simpleblock (GstMatroskaDemux * demux,
GstEbmlRead * ebml, guint64 cluster_time, guint64 cluster_offset,
guint32 id;
guint64 block_duration = -1;
GstBuffer *buf = NULL;
+ GstMapInfo map;
gint stream_num = -1, n, laces = 0;
guint size = 0;
gint *lace_size = NULL;
guint8 *data;
if (buf) {
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
buf = NULL;
}
if ((ret = gst_ebml_read_buffer (ebml, &id, &buf)) != GST_FLOW_OK)
break;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
/* first byte(s): blocknum */
if ((n = gst_matroska_ebmlnum_uint (data, size, &num)) < 0)
}
}
- GST_DEBUG_OBJECT (demux, "CodecState of %u bytes",
+ GST_DEBUG_OBJECT (demux, "CodecState of %" G_GSIZE_FORMAT " bytes",
stream->codec_state_size);
break;
}
}
/* need to refresh segment info ASAP */
- if (GST_CLOCK_TIME_IS_VALID (lace_time) && demux->need_newsegment) {
- guint64 clace_time;
+ if (GST_CLOCK_TIME_IS_VALID (lace_time) && demux->need_segment) {
+ GstSegment *segment = &demux->common.segment;
+ guint64 segment_duration = 0;
GST_DEBUG_OBJECT (demux,
"generating segment starting at %" GST_TIME_FORMAT,
"Setting stream start time to %" GST_TIME_FORMAT,
GST_TIME_ARGS (lace_time));
}
- clace_time = MAX (lace_time, demux->stream_start_time);
- gst_segment_set_newsegment (&demux->common.segment, FALSE,
- demux->common.segment.rate, GST_FORMAT_TIME, clace_time,
- GST_CLOCK_TIME_NONE, clace_time - demux->stream_start_time);
+ if (GST_CLOCK_TIME_IS_VALID (segment->stop))
+ segment_duration = segment->stop - segment->start;
+ else if (GST_CLOCK_TIME_IS_VALID (segment->position))
+ segment_duration = segment->position - segment->start;
+ segment->base += segment_duration / fabs (segment->rate);
+ segment->start = MAX (lace_time, demux->stream_start_time);
+ segment->stop = GST_CLOCK_TIME_NONE;
+ segment->position = segment->start - demux->stream_start_time;
/* now convey our segment notion downstream */
- gst_matroska_demux_send_event (demux, gst_event_new_new_segment (FALSE,
- demux->common.segment.rate, demux->common.segment.format,
- demux->common.segment.start, demux->common.segment.stop,
- demux->common.segment.start));
- demux->need_newsegment = FALSE;
+ gst_matroska_demux_send_event (demux, gst_event_new_segment (segment));
+ demux->need_segment = FALSE;
}
if (block_duration != -1) {
}
}
- sub = gst_buffer_create_sub (buf,
- GST_BUFFER_SIZE (buf) - size, lace_size[n]);
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL,
+ gst_buffer_get_size (buf) - size, lace_size[n]);
GST_DEBUG_OBJECT (demux, "created subbuffer %p", sub);
if (delta_unit)
GST_CLOCK_TIME_IS_VALID (demux->last_stop_end) &&
demux->common.segment.rate > 0.0) {
GstClockTimeDiff diff;
- GstEvent *event1, *event2;
- /* only send newsegments with increasing start times,
+ /* only send segments with increasing start times,
* otherwise if these go back and forth downstream (sinks) increase
* accumulated time and running_time */
diff = GST_CLOCK_DIFF (demux->last_stop_end, lace_time);
&& lace_time > demux->common.segment.start
&& (!GST_CLOCK_TIME_IS_VALID (demux->common.segment.stop)
|| lace_time < demux->common.segment.stop)) {
+ GstSegment segment;
+ GstEvent *event1, *event2;
GST_DEBUG_OBJECT (demux,
"Gap of %" G_GINT64_FORMAT " ns detected in"
"stream %d (%" GST_TIME_FORMAT " -> %" GST_TIME_FORMAT "). "
- "Sending updated NEWSEGMENT events", diff,
+ "Sending updated SEGMENT events", diff,
stream->index, GST_TIME_ARGS (stream->pos),
GST_TIME_ARGS (lace_time));
- /* send newsegment events such that the gap is not accounted in
- * accum time, hence running_time */
+ /* send segment events such that the gap is not accounted in
+ * segment base time, hence running_time */
/* close ahead of gap */
- event1 = gst_event_new_new_segment (TRUE,
- demux->common.segment.rate, demux->common.segment.format,
- demux->last_stop_end, demux->last_stop_end,
- demux->last_stop_end);
+ segment = demux->common.segment;
+ segment.start = demux->last_stop_end;
+ segment.stop = demux->last_stop_end;
+ segment.position = demux->last_stop_end;
+ event1 = gst_event_new_segment (&segment);
/* skip gap */
- event2 = gst_event_new_new_segment (FALSE,
- demux->common.segment.rate,
- demux->common.segment.format, lace_time,
- demux->common.segment.stop, lace_time);
+ segment.start = lace_time;
+ segment.stop = demux->common.segment.stop;
+ segment.position = lace_time;
+ event2 = gst_event_new_segment (&segment);
GST_OBJECT_UNLOCK (demux);
gst_matroska_demux_send_event (demux, event1);
gst_matroska_demux_send_event (demux, event2);
GST_OBJECT_LOCK (demux);
/* align segment view with downstream,
- * prevents double-counting accum when closing segment */
- gst_segment_set_newsegment (&demux->common.segment, FALSE,
- demux->common.segment.rate, demux->common.segment.format,
- lace_time, demux->common.segment.stop, lace_time);
- demux->common.segment.last_stop = lace_time;
+ * prevents double-counting base time when closing segment */
+ /* FIXME: in 0.10, the segment base/accum got updated here, but
+ * maybe we don't need that because of the double accounting
+ * mentioned above? */
+ demux->common.segment = segment;
}
}
- if (!GST_CLOCK_TIME_IS_VALID (demux->common.segment.last_stop)
- || demux->common.segment.last_stop < lace_time) {
- demux->common.segment.last_stop = lace_time;
+ if (!GST_CLOCK_TIME_IS_VALID (demux->common.segment.position)
+ || demux->common.segment.position < lace_time) {
+ demux->common.segment.position = lace_time;
}
GST_OBJECT_UNLOCK (demux);
if (demux->common.segment.duration == -1 ||
demux->stream_start_time + demux->common.segment.duration <
last_stop_end) {
- gst_segment_set_duration (&demux->common.segment, GST_FORMAT_TIME,
- last_stop_end - demux->stream_start_time);
+ demux->common.segment.duration =
+ last_stop_end - demux->stream_start_time;
GST_OBJECT_UNLOCK (demux);
if (!demux->invalid_duration) {
gst_element_post_message (GST_ELEMENT_CAST (demux),
stream->from_offset = offset;
GST_DEBUG_OBJECT (demux,
- "Pushing lace %d, data of size %d for stream %d, time=%"
- GST_TIME_FORMAT " and duration=%" GST_TIME_FORMAT, n,
- GST_BUFFER_SIZE (sub), stream_num,
+ "Pushing lace %d, data of size %" G_GSIZE_FORMAT
+ " for stream %d, time=%" GST_TIME_FORMAT " and duration=%"
+ GST_TIME_FORMAT, n, gst_buffer_get_size (sub), stream_num,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (sub)),
GST_TIME_ARGS (GST_BUFFER_DURATION (sub)));
+#if 0
if (demux->common.element_index) {
if (stream->index_writer_id == -1)
gst_index_get_writer_id (demux->common.element_index,
GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (sub), GST_FORMAT_BYTES,
cluster_offset, NULL);
}
-
- gst_buffer_set_caps (sub, GST_PAD_CAPS (stream->pad));
+#endif
/* Postprocess the buffers depending on the codec used */
if (stream->postprocess_frame) {
elements typically assume minimal alignment.
Therefore, create an aligned copy if necessary. */
g_assert (stream->alignment <= G_MEM_ALIGN);
- if (((guintptr) GST_BUFFER_DATA (sub)) & (stream->alignment - 1)) {
- GstBuffer *buffer = gst_buffer_new_and_alloc (GST_BUFFER_SIZE (sub));
- memcpy (GST_BUFFER_DATA (buffer), GST_BUFFER_DATA (sub),
- GST_BUFFER_SIZE (sub));
- gst_buffer_copy_metadata (buffer, sub, GST_BUFFER_COPY_ALL);
- GST_DEBUG_OBJECT (demux, "We want output aligned on %d, reallocated",
- stream->alignment);
- gst_buffer_unref (sub);
- sub = buffer;
- }
+ sub = gst_matroska_demux_align_buffer (demux, sub, stream->alignment);
ret = gst_pad_push (stream->pad, sub);
if (demux->common.segment.rate < 0) {
- if (lace_time > demux->common.segment.stop
- && ret == GST_FLOW_UNEXPECTED) {
- /* In reverse playback we can get a GST_FLOW_UNEXPECTED when
+ if (lace_time > demux->common.segment.stop && ret == GST_FLOW_EOS) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
* we are at the end of the segment, so we just need to jump
* back to the previous section. */
GST_DEBUG_OBJECT (demux, "downstream has reached end of segment");
}
done:
- if (buf)
+ if (buf) {
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
+ }
g_free (lace_size);
return ret;
}
}
- if (ret != GST_FLOW_OK && ret != GST_FLOW_UNEXPECTED)
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
return ret;
if (!seek_id || seek_pos == (guint64) - 1) {
if (flush <= gst_adapter_available (demux->common.adapter))
gst_adapter_flush (demux->common.adapter, flush);
else
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
return GST_FLOW_OK;
}
/* initializes @ebml with @bytes from input stream at current offset.
- * Returns UNEXPECTED if insufficient available,
+ * Returns EOS if insufficient available,
* ERROR if too much was attempted to read. */
static inline GstFlowReturn
gst_matroska_demux_take (GstMatroskaDemux * demux, guint64 bytes,
if (gst_adapter_available (demux->common.adapter) >= bytes)
buffer = gst_adapter_take_buffer (demux->common.adapter, bytes);
else
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
} else
ret = gst_matroska_read_common_peek_bytes (&demux->common,
demux->common.offset, bytes, &buffer, NULL);
/* try harder to query upstream size if we didn't get it the first time */
if (seekable && stop == -1) {
- GstFormat fmt = GST_FORMAT_BYTES;
-
GST_DEBUG_OBJECT (demux, "doing duration query to fix up unset stop");
- gst_pad_query_peer_duration (demux->common.sinkpad, &fmt, &stop);
+ gst_pad_peer_query_duration (demux->common.sinkpad, GST_FORMAT_BYTES,
+ &stop);
}
/* if upstream doesn't know the size, it's likely that it's not seekable in
demux->first_cluster_offset = demux->common.offset;
GST_DEBUG_OBJECT (demux, "signaling no more pads");
gst_element_no_more_pads (GST_ELEMENT (demux));
- /* send initial newsegment - we wait till we know the first
+ /* send initial segment - we wait till we know the first
incoming timestamp, so we can properly set the start of
the segment. */
- demux->need_newsegment = TRUE;
+ demux->need_segment = TRUE;
}
demux->cluster_time = GST_CLOCK_TIME_NONE;
demux->cluster_offset = demux->common.offset;
goto parse_failed;
GST_DEBUG_OBJECT (demux, "ClusterTimeCode: %" G_GUINT64_FORMAT, num);
demux->cluster_time = num;
+#if 0
if (demux->common.element_index) {
if (demux->common.element_index_writer_id == -1)
gst_index_get_writer_id (demux->common.element_index,
GST_FORMAT_TIME, demux->cluster_time,
GST_FORMAT_BYTES, demux->cluster_offset, NULL);
}
+#endif
break;
}
case GST_MATROSKA_ID_BLOCKGROUP:
/* If we have to close a segment, send a new segment to do this now */
if (G_LIKELY (demux->common.state == GST_MATROSKA_READ_STATE_DATA)) {
- if (G_UNLIKELY (demux->close_segment)) {
- gst_matroska_demux_send_event (demux, demux->close_segment);
- demux->close_segment = NULL;
- }
if (G_UNLIKELY (demux->new_segment)) {
gst_matroska_demux_send_event (demux, demux->new_segment);
demux->new_segment = NULL;
ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
GST_ELEMENT_CAST (demux), &id, &length, &needed);
- if (ret == GST_FLOW_UNEXPECTED)
+ if (ret == GST_FLOW_EOS)
goto eos;
if (ret != GST_FLOW_OK) {
if (gst_matroska_demux_check_parse_error (demux))
length, needed);
ret = gst_matroska_demux_parse_id (demux, id, length, needed);
- if (ret == GST_FLOW_UNEXPECTED)
+ if (ret == GST_FLOW_EOS)
goto eos;
if (ret != GST_FLOW_OK)
goto pause;
}
GST_INFO_OBJECT (demux, "All streams are EOS");
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto eos;
}
if (G_UNLIKELY (demux->common.offset ==
gst_matroska_read_common_get_length (&demux->common))) {
GST_LOG_OBJECT (demux, "Reached end of stream");
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto eos;
}
gboolean push_eos = FALSE;
GST_LOG_OBJECT (demux, "pausing task, reason %s", reason);
- demux->segment_running = FALSE;
gst_pad_pause_task (demux->common.sinkpad);
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
/* perform EOS logic */
/* If we were in the headers, make sure we send no-more-pads.
!GST_CLOCK_TIME_IS_VALID (demux->common.segment.stop) &&
GST_CLOCK_TIME_IS_VALID (demux->common.segment.start) &&
demux->last_stop_end > demux->common.segment.start) {
- /* arrange to accumulate duration downstream, but avoid sending
- * newsegment with decreasing start (w.r.t. sync newsegment events) */
- GstEvent *event =
- gst_event_new_new_segment_full (TRUE, demux->common.segment.rate,
- demux->common.segment.applied_rate, demux->common.segment.format,
- demux->last_stop_end, demux->last_stop_end,
- demux->common.segment.time + (demux->last_stop_end -
- demux->common.segment.start));
+ GstSegment segment = demux->common.segment;
+ GstEvent *event;
+
+ segment.stop = demux->last_stop_end;
+ event = gst_event_new_segment (&segment);
gst_matroska_demux_send_event (demux, event);
}
} else {
push_eos = TRUE;
}
- } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
/* for fatal errors we post an error message */
GST_ELEMENT_ERROR (demux, STREAM, FAILED, (NULL),
("stream stopped, reason %s", reason));
/* send EOS, and prevent hanging if no streams yet */
GST_LOG_OBJECT (demux, "Sending EOS, at end of stream");
if (!gst_matroska_demux_send_event (demux, gst_event_new_eos ()) &&
- (ret == GST_FLOW_UNEXPECTED)) {
+ (ret == GST_FLOW_EOS)) {
GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
(NULL), ("got eos but no streams (yet)"));
}
res = gst_pad_push_event (demux->common.sinkpad, event);
- /* newsegment event will update offset */
+ /* segment event will update offset */
return res;
}
static GstFlowReturn
-gst_matroska_demux_chain (GstPad * pad, GstBuffer * buffer)
+gst_matroska_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
- GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (GST_PAD_PARENT (pad));
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
guint available;
GstFlowReturn ret = GST_FLOW_OK;
guint needed = 0;
ret = gst_matroska_read_common_peek_id_length_push (&demux->common,
GST_ELEMENT_CAST (demux), &id, &length, &needed);
- if (G_UNLIKELY (ret != GST_FLOW_OK && ret != GST_FLOW_UNEXPECTED))
+ if (G_UNLIKELY (ret != GST_FLOW_OK && ret != GST_FLOW_EOS))
return ret;
GST_LOG_OBJECT (demux, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
return GST_FLOW_OK;
ret = gst_matroska_demux_parse_id (demux, id, length, needed);
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
/* need more data */
return GST_FLOW_OK;
} else if (ret != GST_FLOW_OK) {
}
static gboolean
-gst_matroska_demux_handle_sink_event (GstPad * pad, GstEvent * event)
+gst_matroska_demux_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
gboolean res = TRUE;
- GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (GST_PAD_PARENT (pad));
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
GST_DEBUG_OBJECT (demux,
"have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event);
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time = 0;
- gboolean update;
- GstSegment segment;
+ const GstSegment *segment;
/* some debug output */
- gst_segment_init (&segment, GST_FORMAT_UNDEFINED);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
- gst_segment_set_newsegment_full (&segment, update, rate, arate, format,
- start, stop, time);
+ gst_event_parse_segment (event, &segment);
+ /* FIXME: do we need to update segment base here (like accum in 0.10)? */
GST_DEBUG_OBJECT (demux,
- "received format %d newsegment %" GST_SEGMENT_FORMAT, format,
- &segment);
+ "received format %d segment %" GST_SEGMENT_FORMAT, segment->format,
+ segment);
if (demux->common.state < GST_MATROSKA_READ_STATE_DATA) {
GST_DEBUG_OBJECT (demux, "still starting");
}
/* we only expect a BYTE segment, e.g. following a seek */
- if (format != GST_FORMAT_BYTES) {
+ if (segment->format != GST_FORMAT_BYTES) {
GST_DEBUG_OBJECT (demux, "unsupported segment format, ignoring");
goto exit;
}
/* clear current segment leftover */
gst_adapter_clear (demux->common.adapter);
/* and some streaming setup */
- demux->common.offset = start;
+ demux->common.offset = segment->start;
/* do not know where we are;
- * need to come across a cluster and generate newsegment */
- demux->common.segment.last_stop = GST_CLOCK_TIME_NONE;
+ * need to come across a cluster and generate segment */
+ demux->common.segment.position = GST_CLOCK_TIME_NONE;
demux->cluster_time = GST_CLOCK_TIME_NONE;
demux->cluster_offset = 0;
- demux->need_newsegment = TRUE;
+ demux->need_segment = TRUE;
/* but keep some of the upstream segment */
- demux->common.segment.rate = rate;
+ demux->common.segment.rate = segment->rate;
GST_OBJECT_UNLOCK (demux);
exit:
- /* chain will send initial newsegment after pads have been added,
+ /* chain will send initial segment after pads have been added,
* or otherwise come up with one */
GST_DEBUG_OBJECT (demux, "eating event");
gst_event_unref (event);
GST_OBJECT_LOCK (demux);
gst_matroska_read_common_reset_streams (&demux->common,
GST_CLOCK_TIME_NONE, TRUE);
- demux->common.segment.last_stop = GST_CLOCK_TIME_NONE;
+ demux->common.segment.position = GST_CLOCK_TIME_NONE;
demux->cluster_time = GST_CLOCK_TIME_NONE;
demux->cluster_offset = 0;
GST_OBJECT_UNLOCK (demux);
/* fall-through */
}
default:
- res = gst_pad_event_default (pad, event);
+ res = gst_pad_event_default (pad, parent, event);
break;
}
}
static gboolean
-gst_matroska_demux_sink_activate (GstPad * sinkpad)
+gst_matroska_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
{
- GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (GST_PAD_PARENT (sinkpad));
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+ GstQuery *query;
+ gboolean pull_mode = FALSE;
+
+ query = gst_query_new_scheduling ();
- if (gst_pad_check_pull_range (sinkpad)) {
+ if (gst_pad_peer_query (sinkpad, query))
+ pull_mode = gst_query_has_scheduling_mode (query, GST_PAD_MODE_PULL);
+
+ gst_query_unref (query);
+
+ if (pull_mode) {
GST_DEBUG ("going to pull mode");
demux->streaming = FALSE;
- return gst_pad_activate_pull (sinkpad, TRUE);
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
} else {
GST_DEBUG ("going to push (streaming) mode");
demux->streaming = TRUE;
- return gst_pad_activate_push (sinkpad, TRUE);
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
}
-
- return FALSE;
}
static gboolean
-gst_matroska_demux_sink_activate_pull (GstPad * sinkpad, gboolean active)
+gst_matroska_demux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
{
- GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (GST_PAD_PARENT (sinkpad));
-
- if (active) {
- /* if we have a scheduler we can start the task */
- demux->segment_running = TRUE;
- gst_pad_start_task (sinkpad, (GstTaskFunction) gst_matroska_demux_loop,
- sinkpad);
- } else {
- demux->segment_running = FALSE;
- gst_pad_stop_task (sinkpad);
+ switch (mode) {
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ /* if we have a scheduler we can start the task */
+ gst_pad_start_task (sinkpad, (GstTaskFunction) gst_matroska_demux_loop,
+ sinkpad);
+ } else {
+ gst_pad_stop_task (sinkpad);
+ }
+ return TRUE;
+ case GST_PAD_MODE_PUSH:
+ return TRUE;
+ default:
+ return FALSE;
}
-
- return TRUE;
}
static void
vids->imp_colors = GUINT32_FROM_LE (vids->imp_colors);
if (size > sizeof (gst_riff_strf_vids)) { /* some extra_data */
- buf = gst_buffer_new_and_alloc (size - sizeof (gst_riff_strf_vids));
- memcpy (GST_BUFFER_DATA (buf),
- (guint8 *) vids + sizeof (gst_riff_strf_vids),
- GST_BUFFER_SIZE (buf));
+ gsize offset = sizeof (gst_riff_strf_vids);
+
+ buf =
+ gst_buffer_new_wrapped (g_memdup ((guint8 *) vids + offset,
+ size - offset), size - offset);
}
if (riff_fourcc)
g_free (vids);
}
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_UNCOMPRESSED)) {
- guint32 fourcc = 0;
+ const gchar *format = NULL;
switch (videocontext->fourcc) {
case GST_MAKE_FOURCC ('I', '4', '2', '0'):
*codec_name = g_strdup ("Raw planar YUV 4:2:0");
- fourcc = videocontext->fourcc;
+ format = "I420";
break;
case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
*codec_name = g_strdup ("Raw packed YUV 4:2:2");
- fourcc = videocontext->fourcc;
+ format = "YUY2";
break;
case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
*codec_name = g_strdup ("Raw packed YUV 4:2:0");
- fourcc = videocontext->fourcc;
+ format = "YV12";
break;
case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
*codec_name = g_strdup ("Raw packed YUV 4:2:2");
- fourcc = videocontext->fourcc;
+ format = "UYVY";
break;
case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
*codec_name = g_strdup ("Raw packed YUV 4:4:4 with alpha channel");
- fourcc = videocontext->fourcc;
+ format = "AYUV";
break;
default:
return NULL;
}
- caps = gst_caps_new_simple ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, fourcc, NULL);
+ caps = gst_caps_new_simple ("video/x-raw",
+ "format", G_TYPE_STRING, format, NULL);
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_SP)) {
caps = gst_caps_new_simple ("video/x-divx",
"divxversion", G_TYPE_INT, 4, NULL);
"mpegversion", G_TYPE_INT, 4,
"systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
if (data) {
- GstBuffer *priv = gst_buffer_new_and_alloc (size);
+ GstBuffer *priv;
- memcpy (GST_BUFFER_DATA (priv), data, size);
+ priv = gst_buffer_new_wrapped (g_memdup (data, size), size);
gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
gst_buffer_unref (priv);
}
*codec_name = g_strdup_printf ("MPEG-%d video", mpegversion);
context->postprocess_frame = gst_matroska_demux_add_mpeg_seq_header;
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MJPEG)) {
- caps = gst_caps_new_simple ("image/jpeg", NULL);
+ caps = gst_caps_new_empty_simple ("image/jpeg");
*codec_name = g_strdup ("Motion-JPEG");
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_AVC)) {
- caps = gst_caps_new_simple ("video/x-h264", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-h264");
if (data) {
- GstBuffer *priv = gst_buffer_new_and_alloc (size);
+ GstBuffer *priv;
/* First byte is the version, second is the profile indication, and third
* is the 5 contraint_set_flags and 3 reserved bits. Fourth byte is the
gst_codec_utils_h264_caps_set_level_and_profile (caps, data + 1,
size - 1);
- memcpy (GST_BUFFER_DATA (priv), data, size);
+ priv = gst_buffer_new_wrapped (g_memdup (data, size), size);
gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
gst_buffer_unref (priv);
subformat = GST_READ_UINT32_BE (data + 0x1a);
rformat = GST_READ_UINT32_BE (data + 0x1e);
- priv = gst_buffer_new_and_alloc (size - 0x1a);
-
- memcpy (GST_BUFFER_DATA (priv), data + 0x1a, size - 0x1a);
- gst_caps_set_simple (caps,
- "codec_data", GST_TYPE_BUFFER, priv,
- "format", G_TYPE_INT, rformat,
- "subformat", G_TYPE_INT, subformat, NULL);
+ priv =
+ gst_buffer_new_wrapped (g_memdup (data + 0x1a, size - 0x1a),
+ size - 0x1a);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, "format",
+ G_TYPE_INT, rformat, "subformat", G_TYPE_INT, subformat, NULL);
gst_buffer_unref (priv);
}
*codec_name = g_strdup_printf ("RealVideo %d.0", rmversion);
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_THEORA)) {
- caps = gst_caps_new_simple ("video/x-theora", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-theora");
context->send_xiph_headers = TRUE;
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_DIRAC)) {
- caps = gst_caps_new_simple ("video/x-dirac", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-dirac");
*codec_name = g_strdup_printf ("Dirac");
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP8)) {
- caps = gst_caps_new_simple ("video/x-vp8", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-vp8");
*codec_name = g_strdup_printf ("On2 VP8");
} else {
GST_WARNING ("Unknown codec '%s', cannot build Caps", codec_id);
*codec_name = g_strdup_printf ("MPEG-1 layer %d", layer);
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_BE) ||
!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_LE)) {
+ gboolean sign;
gint endianness;
+ GstAudioFormat format;
+ sign = (audiocontext->bitdepth != 8);
if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_BE))
endianness = G_BIG_ENDIAN;
else
endianness = G_LITTLE_ENDIAN;
- caps = gst_caps_new_simple ("audio/x-raw-int",
- "width", G_TYPE_INT, audiocontext->bitdepth,
- "depth", G_TYPE_INT, audiocontext->bitdepth,
- "signed", G_TYPE_BOOLEAN, audiocontext->bitdepth != 8,
- "endianness", G_TYPE_INT, endianness, NULL);
+ format = gst_audio_format_build_integer (sign, endianness,
+ audiocontext->bitdepth, audiocontext->bitdepth);
+
+ /* FIXME: Channel mask and reordering */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+ "layout", G_TYPE_STRING, "interleaved", NULL);
*codec_name = g_strdup_printf ("Raw %d-bit PCM audio",
audiocontext->bitdepth);
context->alignment = audiocontext->bitdepth / 8;
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_FLOAT)) {
- caps = gst_caps_new_simple ("audio/x-raw-float",
- "endianness", G_TYPE_INT, G_LITTLE_ENDIAN,
- "width", G_TYPE_INT, audiocontext->bitdepth, NULL);
+ const gchar *format;
+ if (audiocontext->bitdepth == 32)
+ format = "F32LE";
+ else
+ format = "F64LE";
+ /* FIXME: Channel mask and reordering */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, format,
+ "layout", G_TYPE_STRING, "interleaved", NULL);
*codec_name = g_strdup_printf ("Raw %d-bit floating-point audio",
audiocontext->bitdepth);
context->alignment = audiocontext->bitdepth / 8;
"framed", G_TYPE_BOOLEAN, TRUE, NULL);
*codec_name = g_strdup ("E-AC-3 audio");
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_DTS)) {
- caps = gst_caps_new_simple ("audio/x-dts", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-dts");
*codec_name = g_strdup ("DTS audio");
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_VORBIS)) {
- caps = gst_caps_new_simple ("audio/x-vorbis", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-vorbis");
context->send_xiph_headers = TRUE;
/* vorbis decoder does tags */
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_FLAC)) {
- caps = gst_caps_new_simple ("audio/x-flac", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-flac");
context->send_flac_headers = TRUE;
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_SPEEX)) {
- caps = gst_caps_new_simple ("audio/x-speex", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-speex");
context->send_speex_headers = TRUE;
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_ACM)) {
gst_riff_strf_auds auds;
if (data) {
- GstBuffer *codec_data = gst_buffer_new ();
+ GstBuffer *codec_data;
/* little-endian -> byte-order */
auds.format = GST_READ_UINT16_LE (data);
auds.size = GST_READ_UINT16_LE (data + 16);
/* 18 is the waveformatex size */
- gst_buffer_set_data (codec_data, data + 18, auds.size);
+ codec_data = gst_buffer_new_wrapped_full (data + 18, NULL, 0, auds.size);
if (riff_audio_fmt)
*riff_audio_fmt = auds.format;
+ /* FIXME: Handle reorder map */
caps = gst_riff_create_audio_caps (auds.format, NULL, &auds, NULL,
- codec_data, codec_name);
+ codec_data, codec_name, NULL);
gst_buffer_unref (codec_data);
if (caps == NULL) {
if (freq_index == 15)
explicit_freq_bytes = 3;
GST_DEBUG ("obj_type = %u, freq_index = %u", obj_type, freq_index);
- priv = gst_buffer_new_and_alloc (context->codec_priv_size);
- memcpy (GST_BUFFER_DATA (priv), context->codec_priv,
- context->codec_priv_size);
+ priv = gst_buffer_new_wrapped (g_memdup (context->codec_priv,
+ context->codec_priv_size), context->codec_priv_size);
/* assume SBR if samplerate <= 24kHz */
if (obj_type == 5 || (freq_index >= 6 && freq_index != 15) ||
(context->codec_priv_size == (5 + explicit_freq_bytes))) {
/* make up decoder-specific data if it is not supplied */
if (priv == NULL) {
- priv = gst_buffer_new_and_alloc (5);
- data = GST_BUFFER_DATA (priv);
+ GstMapInfo map;
+
+ priv = gst_buffer_new_allocate (NULL, 5, 0);
+ gst_buffer_map (priv, &map, GST_MAP_WRITE);
+ data = map.data;
rate_idx = aac_rate_idx (audiocontext->samplerate);
profile = aac_profile_idx (codec_id);
data[0] = ((profile + 1) << 3) | ((rate_idx & 0xE) >> 1);
data[1] = ((rate_idx & 0x1) << 7) | (audiocontext->channels << 3);
- GST_BUFFER_SIZE (priv) = 2;
if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG2,
strlen (GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG2))) {
mpegversion = 2;
+ gst_buffer_unmap (priv, &map);
+ gst_buffer_set_size (priv, 2);
} else if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4,
strlen (GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4))) {
mpegversion = 4;
data[2] = AAC_SYNC_EXTENSION_TYPE >> 3;
data[3] = ((AAC_SYNC_EXTENSION_TYPE & 0x07) << 5) | 5;
data[4] = (1 << 7) | (rate_idx << 3);
- GST_BUFFER_SIZE (priv) = 5;
+ gst_buffer_unmap (priv, &map);
+ } else {
+ gst_buffer_unmap (priv, &map);
+ gst_buffer_set_size (priv, 2);
}
} else {
+ gst_buffer_unmap (priv, &map);
gst_buffer_unref (priv);
priv = NULL;
GST_ERROR ("Unknown AAC profile and no codec private data");
G_TYPE_INT, leaf_size, "width", G_TYPE_INT, sample_width, NULL);
if ((size - 78) >= extra_data_size) {
- priv = gst_buffer_new_and_alloc (extra_data_size);
- memcpy (GST_BUFFER_DATA (priv), data + 78, extra_data_size);
+ priv = gst_buffer_new_wrapped (g_memdup (data + 78, extra_data_size),
+ extra_data_size);
gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
gst_buffer_unref (priv);
}
*codec_name = g_strdup_printf ("RealAudio %d.0", raversion);
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_SIPR)) {
- caps = gst_caps_new_simple ("audio/x-sipro", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-sipro");
*codec_name = g_strdup ("Sipro/ACELP.NET Voice Codec");
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_RALF)) {
- caps = gst_caps_new_simple ("audio/x-ralf-mpeg4-generic", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-ralf-mpeg4-generic");
*codec_name = g_strdup ("Real Audio Lossless");
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_ATRC)) {
- caps = gst_caps_new_simple ("audio/x-vnd.sony.atrac3", NULL);
+ caps = gst_caps_new_empty_simple ("audio/x-vnd.sony.atrac3");
*codec_name = g_strdup ("Sony ATRAC3");
} else {
GST_WARNING ("Unknown codec '%s', cannot build Caps", codec_id);
* Check if we have to do something with codec_private */
if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_UTF8)) {
/* well, plain text simply does not have a lot of markup ... */
- caps = gst_caps_new_simple ("text/x-pango-markup", NULL);
+ caps = gst_caps_new_empty_simple ("text/x-pango-markup");
context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
subtitlecontext->check_markup = TRUE;
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_SSA)) {
- caps = gst_caps_new_simple ("application/x-ssa", NULL);
+ caps = gst_caps_new_empty_simple ("application/x-ssa");
context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
subtitlecontext->check_markup = FALSE;
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_ASS)) {
- caps = gst_caps_new_simple ("application/x-ass", NULL);
+ caps = gst_caps_new_empty_simple ("application/x-ass");
context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
subtitlecontext->check_markup = FALSE;
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_USF)) {
- caps = gst_caps_new_simple ("application/x-usf", NULL);
+ caps = gst_caps_new_empty_simple ("application/x-usf");
context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
subtitlecontext->check_markup = FALSE;
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_VOBSUB)) {
- caps = gst_caps_new_simple ("video/x-dvd-subpicture", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-dvd-subpicture");
((GstMatroskaTrackContext *) subtitlecontext)->send_dvd_event = TRUE;
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_HDMVPGS)) {
- caps = gst_caps_new_simple ("subpicture/x-pgs", NULL);
+ caps = gst_caps_new_empty_simple ("subpicture/x-pgs");
} else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_KATE)) {
- caps = gst_caps_new_simple ("subtitle/x-kate", NULL);
+ caps = gst_caps_new_empty_simple ("subtitle/x-kate");
context->send_xiph_headers = TRUE;
} else {
GST_DEBUG ("Unknown subtitle stream: codec_id='%s'", codec_id);
- caps = gst_caps_new_simple ("application/x-subtitle-unknown", NULL);
+ caps = gst_caps_new_empty_simple ("application/x-subtitle-unknown");
}
if (data != NULL && size > 0) {
GstBuffer *buf;
- buf = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buf), data, size);
+ buf = gst_buffer_new_wrapped (g_memdup (data, size), size);
gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
}
return caps;
}
+#if 0
static void
gst_matroska_demux_set_index (GstElement * element, GstIndex * index)
{
return result;
}
+#endif
static GstStateChangeReturn
gst_matroska_demux_change_state (GstElement * element,
GArray *clusters;
/* keeping track of playback position */
- gboolean segment_running;
GstClockTime last_stop_end;
GstClockTime stream_start_time;
- GstEvent *close_segment;
GstEvent *new_segment;
/* some state saving */
gboolean building_index;
guint64 index_offset;
GstEvent *seek_event;
- gboolean need_newsegment;
+ gboolean need_segment;
/* reverse playback */
GArray *seek_index;
/* some often-used info */
gchar *codec_id, *codec_name, *name, *language;
- guint8 *codec_priv;
- guint codec_priv_size;
- guint8 *codec_state;
- guint codec_state_size;
+ gpointer codec_priv;
+ gsize codec_priv_size;
+ gpointer codec_state;
+ gsize codec_state_size;
GstMatroskaTrackType type;
guint uid, num;
GstMatroskaTrackFlags flags;
#include <stdio.h>
#include <string.h>
+#include <gst/audio/audio.h>
#include <gst/riff/riff-media.h>
#include <gst/tag/tag.h>
*/
static GstStaticPadTemplate videosink_templ =
- GST_STATIC_PAD_TEMPLATE ("video_%d",
+ GST_STATIC_PAD_TEMPLATE ("video_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("video/mpeg, "
COMMON_VIDEO_CAPS "; "
"video/x-vp8, "
COMMON_VIDEO_CAPS "; "
- "video/x-raw-yuv, "
- "format = (fourcc) { YUY2, I420, YV12, UYVY, AYUV }, "
+ "video/x-raw, "
+ "format = (string) { YUY2, I420, YV12, UYVY, AYUV }, "
COMMON_VIDEO_CAPS "; "
"video/x-wmv, " "wmvversion = (int) [ 1, 3 ], " COMMON_VIDEO_CAPS)
);
* * require codec data, etc as needed
*/
static GstStaticPadTemplate audiosink_templ =
- GST_STATIC_PAD_TEMPLATE ("audio_%d",
+ GST_STATIC_PAD_TEMPLATE ("audio_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("audio/mpeg, "
COMMON_AUDIO_CAPS "; "
"audio/x-speex, "
COMMON_AUDIO_CAPS "; "
- "audio/x-raw-int, "
- "width = (int) 8, "
- "depth = (int) 8, "
- "signed = (boolean) false, "
- COMMON_AUDIO_CAPS ";"
- "audio/x-raw-int, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "endianness = (int) { BIG_ENDIAN, LITTLE_ENDIAN }, "
- "signed = (boolean) true, "
- COMMON_AUDIO_CAPS ";"
- "audio/x-raw-int, "
- "width = (int) 24, "
- "depth = (int) 24, "
- "endianness = (int) { BIG_ENDIAN, LITTLE_ENDIAN }, "
- "signed = (boolean) true, "
- COMMON_AUDIO_CAPS ";"
- "audio/x-raw-int, "
- "width = (int) 32, "
- "depth = (int) 32, "
- "endianness = (int) { BIG_ENDIAN, LITTLE_ENDIAN }, "
- "signed = (boolean) true, "
- COMMON_AUDIO_CAPS ";"
- "audio/x-raw-float, "
- "width = (int) [ 32, 64 ], "
- "endianness = (int) LITTLE_ENDIAN, "
+ "audio/x-raw, "
+ "format = (string) { U8, S16BE, S16LE, S24BE, S24LE, S32BE, S32LE, F32LE, F64LE }, "
+ "layout = (string) interleaved, "
COMMON_AUDIO_CAPS ";"
"audio/x-tta, "
"width = (int) { 8, 16, 24 }, "
static GArray *used_uids;
G_LOCK_DEFINE_STATIC (used_uids);
-static void gst_matroska_mux_add_interfaces (GType type);
-
-GST_BOILERPLATE_FULL (GstMatroskaMux, gst_matroska_mux, GstElement,
- GST_TYPE_ELEMENT, gst_matroska_mux_add_interfaces);
+#define parent_class gst_matroska_mux_parent_class
+G_DEFINE_TYPE_WITH_CODE (GstMatroskaMux, gst_matroska_mux, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
/* Matroska muxer destructor */
static void gst_matroska_mux_finalize (GObject * object);
/* pad functions */
static gboolean gst_matroska_mux_handle_src_event (GstPad * pad,
- GstEvent * event);
+ GstObject * parent, GstEvent * event);
static GstPad *gst_matroska_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_matroska_mux_release_pad (GstElement * element, GstPad * pad);
/* gst internal change state handler */
gpointer data);
static void
-gst_matroska_mux_add_interfaces (GType type)
-{
- static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };
-
- g_type_add_interface_static (type, GST_TYPE_TAG_SETTER, &tag_setter_info);
-}
-
-static void
-gst_matroska_mux_base_init (gpointer g_class)
-{
-}
-
-static void
gst_matroska_mux_class_init (GstMatroskaMuxClass * klass)
{
GObjectClass *gobject_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gst_element_class_add_static_pad_template (gstelement_class,
- &videosink_templ);
- gst_element_class_add_static_pad_template (gstelement_class,
- &audiosink_templ);
- gst_element_class_add_static_pad_template (gstelement_class,
- &subtitlesink_templ);
- gst_element_class_add_static_pad_template (gstelement_class, &src_templ);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&videosink_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&audiosink_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&subtitlesink_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_templ));
gst_element_class_set_details_simple (gstelement_class, "Matroska muxer",
"Codec/Muxer",
"Muxes video/audio/subtitle streams into a matroska stream",
* Matroska muxer constructor.
*/
static void
-gst_matroska_mux_init (GstMatroskaMux * mux, GstMatroskaMuxClass * g_class)
+gst_matroska_mux_init (GstMatroskaMux * mux)
{
GstPadTemplate *templ;
templ =
- gst_element_class_get_pad_template (GST_ELEMENT_CLASS (g_class), "src");
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mux), "src");
mux->srcpad = gst_pad_new_from_template (templ, "src");
gst_pad_set_event_function (mux->srcpad, gst_matroska_mux_handle_src_event);
* Release resources of a matroska collect pad.
*/
static void
-gst_matroska_pad_free (GstMatroskaPad * collect_pad)
+gst_matroska_pad_free (GstPad * collect_pad)
{
- gst_matroska_pad_reset (collect_pad, TRUE);
+ gst_matroska_pad_reset ((GstMatroskaPad *) collect_pad, TRUE);
}
* Returns: #TRUE on success.
*/
static gboolean
-gst_matroska_mux_handle_src_event (GstPad * pad, GstEvent * event)
+gst_matroska_mux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstEventType type;
break;
}
- return gst_pad_event_default (pad, event);
+ return gst_pad_event_default (pad, parent, event);
}
gst_matroska_mux_handle_sink_event (GstCollectPads2 * pads,
GstCollectData2 * data, GstEvent * event, gpointer user_data)
{
- GstMatroskaTrackContext *context;
GstMatroskaPad *collect_pad;
+ GstMatroskaTrackContext *context;
GstMatroskaMux *mux;
GstPad *pad;
GstTagList *list;
+ gboolean ret = FALSE;
mux = GST_MATROSKA_MUX (user_data);
collect_pad = (GstMatroskaPad *) data;
g_assert (context);
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:{
+ GstCaps *caps;
+
+ collect_pad = (GstMatroskaPad *) gst_pad_get_element_private (pad);
+ gst_event_parse_caps (event, &caps);
+
+ ret = collect_pad->capsfunc (pad, caps);
+ gst_event_unref (event);
+ event = NULL;
+ break;
+ }
case GST_EVENT_TAG:{
gchar *lang = NULL;
gst_event_unref (event);
/* handled this, don't want collectpads to forward it downstream */
event = NULL;
+ ret = TRUE;
break;
}
- case GST_EVENT_NEWSEGMENT:{
- GstFormat format;
+ case GST_EVENT_SEGMENT:{
+ const GstSegment *segment;
- gst_event_parse_new_segment (event, NULL, NULL, &format, NULL, NULL,
- NULL);
- if (format != GST_FORMAT_TIME) {
- gst_event_unref (event);
- event = NULL;
+ gst_event_parse_segment (event, &segment);
+ if (segment->format != GST_FORMAT_TIME) {
+ ret = FALSE;
}
+ gst_event_unref (event);
+ event = NULL;
+ ret = TRUE;
break;
}
case GST_EVENT_CUSTOM_DOWNSTREAM:{
/* transform into private data for stream; text form */
gst_matroska_mux_build_vobsub_private (context, clut);
}
- break;
}
+ /* fall through */
default:
+ ret = gst_pad_event_default (data->pad, GST_OBJECT (mux), event);
+ break;
+ case GST_EVENT_EOS:
+ gst_event_unref (event);
+ ret = TRUE;
break;
}
- /* now GstCollectPads2 can take care of the rest, e.g. EOS */
- if (event)
- return FALSE;
- else
- return TRUE;
+ return ret;
}
static void
GstStructure *structure;
const gchar *mimetype;
const GValue *value = NULL;
- const GstBuffer *codec_buf = NULL;
+ GstBuffer *codec_buf = NULL;
gint width, height, pixel_width, pixel_height;
gint fps_d, fps_n;
gboolean interlaced = FALSE;
/* extract codec_data, may turn out needed */
value = gst_structure_get_value (structure, "codec_data");
if (value)
- codec_buf = gst_value_get_buffer (value);
+ codec_buf = (GstBuffer *) gst_value_get_buffer (value);
/* find type */
- if (!strcmp (mimetype, "video/x-raw-yuv")) {
+ if (!strcmp (mimetype, "video/x-raw")) {
+ const gchar *fstr;
gst_matroska_mux_set_codec_id (context,
GST_MATROSKA_CODEC_ID_VIDEO_UNCOMPRESSED);
- gst_structure_get_fourcc (structure, "format", &videocontext->fourcc);
+ fstr = gst_structure_get_string (structure, "format");
+ if (fstr && strlen (fstr) == 4)
+ videocontext->fourcc = GST_STR_FOURCC (fstr);
+ } else if (!strcmp (mimetype, "image/jpeg")) {
+ gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_VIDEO_MJPEG);
} else if (!strcmp (mimetype, "video/x-xvid") /* MS/VfW compatibility cases */
||!strcmp (mimetype, "video/x-huffyuv")
|| !strcmp (mimetype, "video/x-divx")
}
} else if (!strcmp (mimetype, "video/x-wmv")) {
gint wmvversion;
- guint32 format;
- if (gst_structure_get_fourcc (structure, "format", &format)) {
- fourcc = format;
+ const gchar *fstr;
+
+ fstr = gst_structure_get_string (structure, "format");
+ if (fstr && strlen (fstr) == 4) {
+ fourcc = GST_STR_FOURCC (fstr);
} else if (gst_structure_get_int (structure, "wmvversion", &wmvversion)) {
if (wmvversion == 2) {
fourcc = GST_MAKE_FOURCC ('W', 'M', 'V', '2');
/* process codec private/initialization data, if any */
if (codec_buf) {
- size += GST_BUFFER_SIZE (codec_buf);
+ size += gst_buffer_get_size (codec_buf);
bih = g_realloc (bih, size);
GST_WRITE_UINT32_LE (&bih->size, size);
- memcpy ((guint8 *) bih + sizeof (gst_riff_strf_vids),
- GST_BUFFER_DATA (codec_buf), GST_BUFFER_SIZE (codec_buf));
+ gst_buffer_extract (codec_buf, 0,
+ (guint8 *) bih + sizeof (gst_riff_strf_vids), -1);
}
gst_matroska_mux_set_codec_id (context,
gst_matroska_mux_free_codec_priv (context);
/* Create avcC header */
if (codec_buf != NULL) {
- context->codec_priv_size = GST_BUFFER_SIZE (codec_buf);
+ context->codec_priv_size = gst_buffer_get_size (codec_buf);
context->codec_priv = g_malloc0 (context->codec_priv_size);
- memcpy (context->codec_priv, GST_BUFFER_DATA (codec_buf),
- context->codec_priv_size);
+ gst_buffer_extract (codec_buf, 0, context->codec_priv, -1);
}
} else if (!strcmp (mimetype, "video/x-theora")) {
const GValue *streamheader;
/* global headers may be in codec data */
if (codec_buf != NULL) {
gst_matroska_mux_free_codec_priv (context);
- context->codec_priv_size = GST_BUFFER_SIZE (codec_buf);
+ context->codec_priv_size = gst_buffer_get_size (codec_buf);
context->codec_priv = g_malloc0 (context->codec_priv_size);
- memcpy (context->codec_priv, GST_BUFFER_DATA (codec_buf),
- context->codec_priv_size);
+ gst_buffer_extract (codec_buf, 0, context->codec_priv, -1);
}
} else if (!strcmp (mimetype, "video/x-msmpeg")) {
msmpeg43:
GstBuffer *codec_data_buf = g_value_peek_pointer (mdpr_data);
- priv_data_size = GST_BUFFER_SIZE (codec_data_buf);
+ priv_data_size = gst_buffer_get_size (codec_data_buf);
priv_data = g_malloc0 (priv_data_size);
- memcpy (priv_data, GST_BUFFER_DATA (codec_data_buf), priv_data_size);
+ gst_buffer_extract (codec_data_buf, 0, priv_data, -1);
gst_matroska_mux_free_codec_priv (context);
context->codec_priv = priv_data;
priv_data_size = 1;
if (bufarr->len > 0) {
for (i = 0; i < bufarr->len - 1; i++) {
- priv_data_size += GST_BUFFER_SIZE (buf[i]) / 0xff + 1;
+ priv_data_size += gst_buffer_get_size (buf[i]) / 0xff + 1;
}
}
for (i = 0; i < bufarr->len; ++i) {
- priv_data_size += GST_BUFFER_SIZE (buf[i]);
+ priv_data_size += gst_buffer_get_size (buf[i]);
}
priv_data = g_malloc0 (priv_data_size);
if (bufarr->len > 0) {
for (bufi = 0; bufi < bufarr->len - 1; bufi++) {
- for (i = 0; i < GST_BUFFER_SIZE (buf[bufi]) / 0xff; ++i) {
+ for (i = 0; i < gst_buffer_get_size (buf[bufi]) / 0xff; ++i) {
priv_data[offset++] = 0xff;
}
- priv_data[offset++] = GST_BUFFER_SIZE (buf[bufi]) % 0xff;
+ priv_data[offset++] = gst_buffer_get_size (buf[bufi]) % 0xff;
}
}
for (i = 0; i < bufarr->len; ++i) {
- memcpy (priv_data + offset, GST_BUFFER_DATA (buf[i]),
- GST_BUFFER_SIZE (buf[i]));
- offset += GST_BUFFER_SIZE (buf[i]);
+ gst_buffer_extract (buf[i], 0, priv_data + offset, -1);
+ offset += gst_buffer_get_size (buf[i]);
}
gst_matroska_mux_free_codec_priv (context);
if (!xiphN_streamheader_to_codecdata (streamheader, context, &buf0, 3))
return FALSE;
- if (buf0 == NULL || GST_BUFFER_SIZE (buf0) < 1 + 6 + 4) {
+ if (buf0 == NULL || gst_buffer_get_size (buf0) < 1 + 6 + 4) {
GST_WARNING ("First vorbis header too small, ignoring");
} else {
- if (memcmp (GST_BUFFER_DATA (buf0) + 1, "vorbis", 6) == 0) {
+ if (gst_buffer_memcmp (buf0, 1, "vorbis", 6) == 0) {
GstMatroskaTrackAudioContext *audiocontext;
+ GstMapInfo map;
guint8 *hdr;
- hdr = GST_BUFFER_DATA (buf0) + 1 + 6 + 4;
+ gst_buffer_map (buf0, &map, GST_MAP_READ);
+ hdr = map.data + 1 + 6 + 4;
audiocontext = (GstMatroskaTrackAudioContext *) context;
audiocontext->channels = GST_READ_UINT8 (hdr);
audiocontext->samplerate = GST_READ_UINT32_LE (hdr + 1);
+ gst_buffer_unmap (buf0, &map);
}
}
if (!xiphN_streamheader_to_codecdata (streamheader, context, &buf0, 3))
return FALSE;
- if (buf0 == NULL || GST_BUFFER_SIZE (buf0) < 1 + 6 + 26) {
+ if (buf0 == NULL || gst_buffer_get_size (buf0) < 1 + 6 + 26) {
GST_WARNING ("First theora header too small, ignoring");
- } else if (memcmp (GST_BUFFER_DATA (buf0), "\200theora\003\002", 9) != 0) {
+ } else if (gst_buffer_memcmp (buf0, 0, "\200theora\003\002", 9) != 0) {
GST_WARNING ("First header not a theora identification header, ignoring");
} else {
GstMatroskaTrackVideoContext *videocontext;
guint fps_num, fps_denom, par_num, par_denom;
+ GstMapInfo map;
guint8 *hdr;
- hdr = GST_BUFFER_DATA (buf0) + 1 + 6 + 3 + 2 + 2;
+ gst_buffer_map (buf0, &map, GST_MAP_READ);
+ hdr = map.data + 1 + 6 + 3 + 2 + 2;
videocontext = (GstMatroskaTrackVideoContext *) context;
videocontext->pixel_width = GST_READ_UINT32_BE (hdr) >> 8;
videocontext->display_height = 0;
}
hdr += 3 + 3;
+
+ gst_buffer_unmap (buf0, &map);
}
if (buf0)
if (!xiphN_streamheader_to_codecdata (streamheader, context, &buf0, -1))
return FALSE;
- if (buf0 == NULL || GST_BUFFER_SIZE (buf0) < 64) { /* Kate ID header is 64 bytes */
+ if (buf0 == NULL || gst_buffer_get_size (buf0) < 64) { /* Kate ID header is 64 bytes */
GST_WARNING ("First kate header too small, ignoring");
- } else if (memcmp (GST_BUFFER_DATA (buf0), "\200kate\0\0\0", 8) != 0) {
+ } else if (gst_buffer_memcmp (buf0, 0, "\200kate\0\0\0", 8) != 0) {
GST_WARNING ("First header not a kate identification header, ignoring");
}
buffer = g_value_peek_pointer (bufval);
/* Need at least OggFLAC mapping header, fLaC marker and STREAMINFO block */
- if (GST_BUFFER_SIZE (buffer) < 9 + 4 + 4 + 34
- || memcmp (GST_BUFFER_DATA (buffer) + 1, "FLAC", 4) != 0
- || memcmp (GST_BUFFER_DATA (buffer) + 9, "fLaC", 4) != 0) {
+ if (gst_buffer_get_size (buffer) < 9 + 4 + 4 + 34
+ || gst_buffer_memcmp (buffer, 1, "FLAC", 4) != 0
+ || gst_buffer_memcmp (buffer, 9, "fLaC", 4) != 0) {
GST_WARNING ("Invalid streamheader for FLAC");
return FALSE;
}
gst_matroska_mux_free_codec_priv (context);
- context->codec_priv = g_malloc (GST_BUFFER_SIZE (buffer) - 9);
- context->codec_priv_size = GST_BUFFER_SIZE (buffer) - 9;
- memcpy (context->codec_priv, GST_BUFFER_DATA (buffer) + 9,
- GST_BUFFER_SIZE (buffer) - 9);
+ context->codec_priv_size = gst_buffer_get_size (buffer) - 9;
+ context->codec_priv = g_malloc (context->codec_priv_size);
+ gst_buffer_extract (buffer, 9, context->codec_priv, -1);
for (i = 1; i < bufarr->len; i++) {
+ guint old_size;
bufval = &g_array_index (bufarr, GValue, i);
if (G_VALUE_TYPE (bufval) != GST_TYPE_BUFFER) {
buffer = g_value_peek_pointer (bufval);
- context->codec_priv =
- g_realloc (context->codec_priv,
- context->codec_priv_size + GST_BUFFER_SIZE (buffer));
- memcpy ((guint8 *) context->codec_priv + context->codec_priv_size,
- GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
- context->codec_priv_size =
- context->codec_priv_size + GST_BUFFER_SIZE (buffer);
+ old_size = context->codec_priv_size;
+ context->codec_priv_size += gst_buffer_get_size (buffer);
+
+ context->codec_priv = g_realloc (context->codec_priv,
+ context->codec_priv_size);
+ gst_buffer_extract (buffer, 0,
+ (guint8 *) context->codec_priv + old_size, -1);
}
return TRUE;
GArray *bufarr;
GValue *bufval;
GstBuffer *buffer;
+ guint old_size;
if (streamheader == NULL || G_VALUE_TYPE (streamheader) != GST_TYPE_ARRAY) {
GST_WARNING ("No or invalid streamheader field in the caps");
buffer = g_value_peek_pointer (bufval);
- if (GST_BUFFER_SIZE (buffer) < 80
- || memcmp (GST_BUFFER_DATA (buffer), "Speex ", 8) != 0) {
+ if (gst_buffer_get_size (buffer) < 80
+ || gst_buffer_memcmp (buffer, 0, "Speex ", 8) != 0) {
GST_WARNING ("Invalid streamheader for Speex");
return FALSE;
}
gst_matroska_mux_free_codec_priv (context);
- context->codec_priv = g_malloc (GST_BUFFER_SIZE (buffer));
- context->codec_priv_size = GST_BUFFER_SIZE (buffer);
- memcpy (context->codec_priv, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ context->codec_priv_size = gst_buffer_get_size (buffer);
+ context->codec_priv = g_malloc (context->codec_priv_size);
+ gst_buffer_extract (buffer, 0, context->codec_priv, -1);
bufval = &g_array_index (bufarr, GValue, 1);
buffer = g_value_peek_pointer (bufval);
- context->codec_priv =
- g_realloc (context->codec_priv,
- context->codec_priv_size + GST_BUFFER_SIZE (buffer));
- memcpy ((guint8 *) context->codec_priv + context->codec_priv_size,
- GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
- context->codec_priv_size =
- context->codec_priv_size + GST_BUFFER_SIZE (buffer);
+ old_size = context->codec_priv_size;
+ context->codec_priv_size += gst_buffer_get_size (buffer);
+ context->codec_priv = g_realloc (context->codec_priv,
+ context->codec_priv_size);
+ gst_buffer_extract (buffer, 0, (guint8 *) context->codec_priv + old_size, -1);
return TRUE;
}
static const gchar *
-aac_codec_data_to_codec_id (const GstBuffer * buf)
+aac_codec_data_to_codec_id (GstBuffer * buf)
{
const gchar *result;
- gint profile;
+ guint8 profile;
/* default to MAIN */
profile = 1;
- if (GST_BUFFER_SIZE (buf) >= 2) {
- profile = GST_READ_UINT8 (GST_BUFFER_DATA (buf));
+ if (gst_buffer_get_size (buf) >= 2) {
+ gst_buffer_extract (buf, 0, &profile, 1);
profile >>= 3;
}
gint samplerate = 0, channels = 0;
GstStructure *structure;
const GValue *codec_data = NULL;
- const GstBuffer *buf = NULL;
+ GstBuffer *buf = NULL;
const gchar *stream_format = NULL;
mux = GST_MATROSKA_MUX (GST_PAD_PARENT (pad));
default:
goto refuse_caps;
}
- } else if (!strcmp (mimetype, "audio/x-raw-int")) {
- gint width, depth;
- gint endianness = G_LITTLE_ENDIAN;
- gboolean signedness = TRUE;
-
- if (!gst_structure_get_int (structure, "width", &width) ||
- !gst_structure_get_int (structure, "depth", &depth) ||
- !gst_structure_get_boolean (structure, "signed", &signedness)) {
- GST_DEBUG_OBJECT (mux, "broken caps, width/depth/signed field missing");
- goto refuse_caps;
- }
+ } else if (!strcmp (mimetype, "audio/x-raw")) {
+ GstAudioInfo info;
- if (depth > 8 &&
- !gst_structure_get_int (structure, "endianness", &endianness)) {
- GST_DEBUG_OBJECT (mux, "broken caps, no endianness specified");
+ gst_audio_info_init (&info);
+ if (!gst_audio_info_from_caps (&info, caps)) {
+ GST_DEBUG_OBJECT (mux,
+ "broken caps, rejected by gst_audio_info_from_caps");
goto refuse_caps;
}
- if (width != depth) {
- GST_DEBUG_OBJECT (mux, "width must be same as depth!");
- goto refuse_caps;
- }
-
- /* FIXME: where is this spec'ed out? (tpm) */
- if ((width == 8 && signedness) || (width >= 16 && !signedness)) {
- GST_DEBUG_OBJECT (mux, "8-bit PCM must be unsigned, 16-bit PCM signed");
- goto refuse_caps;
- }
-
- audiocontext->bitdepth = depth;
- if (endianness == G_BIG_ENDIAN)
- gst_matroska_mux_set_codec_id (context,
- GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_BE);
- else
- gst_matroska_mux_set_codec_id (context,
- GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_LE);
-
- } else if (!strcmp (mimetype, "audio/x-raw-float")) {
- gint width;
+ switch (GST_AUDIO_INFO_FORMAT (&info)) {
+ case GST_AUDIO_FORMAT_U8:
+ case GST_AUDIO_FORMAT_S16BE:
+ case GST_AUDIO_FORMAT_S16LE:
+ case GST_AUDIO_FORMAT_S24BE:
+ case GST_AUDIO_FORMAT_S24LE:
+ case GST_AUDIO_FORMAT_S32BE:
+ case GST_AUDIO_FORMAT_S32LE:
+ if (GST_AUDIO_INFO_WIDTH (&info) != GST_AUDIO_INFO_DEPTH (&info)) {
+ GST_DEBUG_OBJECT (mux, "width must be same as depth!");
+ goto refuse_caps;
+ }
+ if (GST_AUDIO_INFO_IS_BIG_ENDIAN (&info))
+ gst_matroska_mux_set_codec_id (context,
+ GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_BE);
+ else
+ gst_matroska_mux_set_codec_id (context,
+ GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_LE);
+ break;
+ case GST_AUDIO_FORMAT_F32LE:
+ case GST_AUDIO_FORMAT_F64LE:
+ gst_matroska_mux_set_codec_id (context,
+ GST_MATROSKA_CODEC_ID_AUDIO_PCM_FLOAT);
+ break;
- if (!gst_structure_get_int (structure, "width", &width)) {
- GST_DEBUG_OBJECT (mux, "broken caps, width field missing");
- goto refuse_caps;
+ default:
+ GST_DEBUG_OBJECT (mux, "wrong format in raw audio caps");
+ goto refuse_caps;
}
- audiocontext->bitdepth = width;
- gst_matroska_mux_set_codec_id (context,
- GST_MATROSKA_CODEC_ID_AUDIO_PCM_FLOAT);
-
+ audiocontext->bitdepth = GST_AUDIO_INFO_WIDTH (&info);
} else if (!strcmp (mimetype, "audio/x-vorbis")) {
const GValue *streamheader;
GstBuffer *codec_data_buf = g_value_peek_pointer (mdpr_data);
- priv_data_size = GST_BUFFER_SIZE (codec_data_buf);
+ priv_data_size = gst_buffer_get_size (codec_data_buf);
priv_data = g_malloc0 (priv_data_size);
- memcpy (priv_data, GST_BUFFER_DATA (codec_data_buf), priv_data_size);
+ gst_buffer_extract (codec_data_buf, 0, priv_data, -1);
gst_matroska_mux_free_codec_priv (context);
codec_priv_size = WAVEFORMATEX_SIZE;
if (buf)
- codec_priv_size += GST_BUFFER_SIZE (buf);
+ codec_priv_size += gst_buffer_get_size (buf);
/* serialize waveformatex structure */
codec_priv = g_malloc0 (codec_priv_size);
GST_WRITE_UINT16_LE (codec_priv + 12, block_align);
GST_WRITE_UINT16_LE (codec_priv + 14, 0);
if (buf)
- GST_WRITE_UINT16_LE (codec_priv + 16, GST_BUFFER_SIZE (buf));
+ GST_WRITE_UINT16_LE (codec_priv + 16, gst_buffer_get_size (buf));
else
GST_WRITE_UINT16_LE (codec_priv + 16, 0);
/* process codec private/initialization data, if any */
if (buf) {
- memcpy ((guint8 *) codec_priv + WAVEFORMATEX_SIZE,
- GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ gst_buffer_extract (buf, 0,
+ (guint8 *) codec_priv + WAVEFORMATEX_SIZE, -1);
}
gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_AUDIO_ACM);
const gchar *mimetype;
GstStructure *structure;
const GValue *value = NULL;
- const GstBuffer *buf = NULL;
+ GstBuffer *buf = NULL;
gboolean ret = TRUE;
mux = GST_MATROSKA_MUX (GST_PAD_PARENT (pad));
if (value)
buf = gst_value_get_buffer (value);
if (buf != NULL) {
+ GstMapInfo map;
guint8 *priv_data = NULL;
- guint priv_data_size = 0;
- priv_data_size = GST_BUFFER_SIZE (buf);
- if (priv_data_size > SUBTITLE_MAX_CODEC_PRIVATE) {
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ if (map.size > SUBTITLE_MAX_CODEC_PRIVATE) {
GST_WARNING_OBJECT (mux, "pad %" GST_PTR_FORMAT " subtitle private data"
" exceeded maximum (%d); discarding", pad,
SUBTITLE_MAX_CODEC_PRIVATE);
+ gst_buffer_unmap (buf, &map);
return TRUE;
}
gst_matroska_mux_free_codec_priv (context);
- priv_data = g_malloc0 (priv_data_size);
- memcpy (priv_data, GST_BUFFER_DATA (buf), priv_data_size);
+ priv_data = g_malloc0 (map.size);
+ memcpy (priv_data, map.data, map.size);
context->codec_priv = priv_data;
- context->codec_priv_size = priv_data_size;
+ context->codec_priv_size = map.size;
+ gst_buffer_unmap (buf, &map);
}
- GST_DEBUG_OBJECT (pad, "codec_id %s, codec data size %u",
+ GST_DEBUG_OBJECT (pad, "codec_id %s, codec data size %" G_GSIZE_FORMAT,
GST_STR_NULL (context->codec_id), context->codec_priv_size);
exit:
*/
static GstPad *
gst_matroska_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * req_name)
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
GstMatroskaMux *mux = GST_MATROSKA_MUX (element);
GstMatroskamuxPad *newpad;
gchar *name = NULL;
const gchar *pad_name = NULL;
- GstPadSetCapsFunction setcapsfunc = NULL;
+ GstMatroskaCapsFunc capsfunc = NULL;
GstMatroskaTrackContext *context = NULL;
gint pad_id;
gboolean locked = TRUE;
gchar *id = NULL;
- if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) {
+ if (templ == gst_element_class_get_pad_template (klass, "audio_%u")) {
/* don't mix named and unnamed pads, if the pad already exists we fail when
* trying to add it */
- if (req_name != NULL && sscanf (req_name, "audio_%d", &pad_id) == 1) {
+ if (req_name != NULL && sscanf (req_name, "audio_%u", &pad_id) == 1) {
pad_name = req_name;
} else {
- name = g_strdup_printf ("audio_%d", mux->num_a_streams++);
+ name = g_strdup_printf ("audio_%u", mux->num_a_streams++);
pad_name = name;
}
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_matroska_mux_audio_pad_setcaps);
+ capsfunc = GST_DEBUG_FUNCPTR (gst_matroska_mux_audio_pad_setcaps);
context = (GstMatroskaTrackContext *)
g_new0 (GstMatroskaTrackAudioContext, 1);
context->type = GST_MATROSKA_TRACK_TYPE_AUDIO;
context->name = g_strdup ("Audio");
- } else if (templ == gst_element_class_get_pad_template (klass, "video_%d")) {
+ } else if (templ == gst_element_class_get_pad_template (klass, "video_%u")) {
/* don't mix named and unnamed pads, if the pad already exists we fail when
* trying to add it */
- if (req_name != NULL && sscanf (req_name, "video_%d", &pad_id) == 1) {
+ if (req_name != NULL && sscanf (req_name, "video_%u", &pad_id) == 1) {
pad_name = req_name;
} else {
- name = g_strdup_printf ("video_%d", mux->num_v_streams++);
+ name = g_strdup_printf ("video_%u", mux->num_v_streams++);
pad_name = name;
}
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_matroska_mux_video_pad_setcaps);
+ capsfunc = GST_DEBUG_FUNCPTR (gst_matroska_mux_video_pad_setcaps);
context = (GstMatroskaTrackContext *)
g_new0 (GstMatroskaTrackVideoContext, 1);
context->type = GST_MATROSKA_TRACK_TYPE_VIDEO;
context->name = g_strdup ("Video");
- } else if (templ == gst_element_class_get_pad_template (klass, "subtitle_%d")) {
+ } else if (templ == gst_element_class_get_pad_template (klass, "subtitle_%u")) {
/* don't mix named and unnamed pads, if the pad already exists we fail when
* trying to add it */
- if (req_name != NULL && sscanf (req_name, "subtitle_%d", &pad_id) == 1) {
+ if (req_name != NULL && sscanf (req_name, "subtitle_%u", &pad_id) == 1) {
pad_name = req_name;
} else {
- name = g_strdup_printf ("subtitle_%d", mux->num_t_streams++);
+ name = g_strdup_printf ("subtitle_%u", mux->num_t_streams++);
pad_name = name;
}
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_matroska_mux_subtitle_pad_setcaps);
+ capsfunc = GST_DEBUG_FUNCPTR (gst_matroska_mux_subtitle_pad_setcaps);
context = (GstMatroskaTrackContext *)
g_new0 (GstMatroskaTrackSubtitleContext, 1);
context->type = GST_MATROSKA_TRACK_TYPE_SUBTITLE;
gst_matroska_pad_reset (collect_pad, FALSE);
collect_pad->track->codec_id = id;
- gst_pad_set_setcaps_function (GST_PAD (newpad), setcapsfunc);
+ collect_pad->capsfunc = capsfunc;
gst_pad_set_active (GST_PAD (newpad), TRUE);
if (!gst_element_add_pad (element, GST_PAD (newpad)))
goto pad_add_failed;
GTimeVal time = { 0, 0 };
if (!strcmp (mux->doctype, GST_MATROSKA_DOCTYPE_WEBM)) {
- ebml->caps = gst_caps_new_simple ("video/webm", NULL);
+ ebml->caps = gst_caps_new_empty_simple ("video/webm");
} else {
- ebml->caps = gst_caps_new_simple ("video/x-matroska", NULL);
+ ebml->caps = gst_caps_new_empty_simple ("video/x-matroska");
}
/* we start with a EBML header */
doctype = mux->doctype;
for (collected = mux->collect->data; collected;
collected = g_slist_next (collected)) {
GstMatroskaPad *collect_pad;
- GstFormat format = GST_FORMAT_TIME;
GstPad *thepad;
gint64 trackduration;
/* Query the total length of the track. */
GST_DEBUG_OBJECT (thepad, "querying peer duration");
- if (gst_pad_query_peer_duration (thepad, &format, &trackduration)) {
+ if (gst_pad_peer_query_duration (thepad, GST_FORMAT_TIME, &trackduration)) {
GST_DEBUG_OBJECT (thepad, "duration: %" GST_TIME_FORMAT,
GST_TIME_ARGS (trackduration));
if (trackduration != GST_CLOCK_TIME_NONE && trackduration > duration) {
gint16 relative_timestamp, int flags)
{
GstBuffer *hdr;
+ guint8 *data = g_malloc (4);
- hdr = gst_buffer_new_and_alloc (4);
+ hdr = gst_buffer_new_wrapped (data, 4);
/* track num - FIXME: what if num >= 0x80 (unlikely)? */
- GST_BUFFER_DATA (hdr)[0] = track->num | 0x80;
+ data[0] = track->num | 0x80;
/* time relative to clustertime */
- GST_WRITE_UINT16_BE (GST_BUFFER_DATA (hdr) + 1, relative_timestamp);
+ GST_WRITE_UINT16_BE (data + 1, relative_timestamp);
/* flags */
- GST_BUFFER_DATA (hdr)[3] = flags;
+ data[3] = flags;
return hdr;
}
{
GstMatroskaTrackVideoContext *ctx =
(GstMatroskaTrackVideoContext *) collect_pad->track;
- const guint8 *data = GST_BUFFER_DATA (buf);
- guint size = GST_BUFFER_SIZE (buf);
+ GstMapInfo map;
+ guint8 *data;
+ gsize size;
guint8 parse_code;
guint32 next_parse_offset;
GstBuffer *ret = NULL;
gboolean is_muxing_unit = FALSE;
- if (GST_BUFFER_SIZE (buf) < 13) {
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ if (size < 13) {
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return ret;
}
/* Check if this buffer contains a picture or end-of-sequence packet */
while (size >= 13) {
if (GST_READ_UINT32_BE (data) != 0x42424344 /* 'BBCD' */ ) {
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return ret;
}
else
ctx->dirac_unit = gst_buffer_ref (buf);
+ gst_buffer_unmap (buf, &map);
+
if (is_muxing_unit) {
- ret = gst_buffer_make_metadata_writable (ctx->dirac_unit);
+ ret = gst_buffer_make_writable (ctx->dirac_unit);
ctx->dirac_unit = NULL;
- gst_buffer_copy_metadata (ret, buf,
- GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
- GST_BUFFER_COPY_CAPS);
+ gst_buffer_copy_into (ret, buf,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
gst_buffer_unref (buf);
} else {
gst_buffer_unref (buf);
streamheader_buffer = gst_ebml_stop_streamheader (ebml);
if (!strcmp (mux->doctype, GST_MATROSKA_DOCTYPE_WEBM)) {
- caps = gst_caps_new_simple ("video/webm", NULL);
+ caps = gst_caps_new_empty_simple ("video/webm");
} else {
- caps = gst_caps_new_simple ("video/x-matroska", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-matroska");
}
s = gst_caps_get_structure (caps, 0);
g_value_init (&streamheader, GST_TYPE_ARRAY);
g_value_init (&bufval, GST_TYPE_BUFFER);
- GST_BUFFER_FLAG_SET (streamheader_buffer, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (streamheader_buffer, GST_BUFFER_FLAG_HEADER);
gst_value_set_buffer (&bufval, streamheader_buffer);
gst_value_array_append_value (&streamheader, &bufval);
g_value_unset (&bufval);
relative_timestamp, flags);
gst_ebml_write_set_cache (ebml, 0x40);
gst_ebml_write_buffer_header (ebml, GST_MATROSKA_ID_SIMPLEBLOCK,
- GST_BUFFER_SIZE (buf) + GST_BUFFER_SIZE (hdr));
+ gst_buffer_get_size (buf) + gst_buffer_get_size (hdr));
gst_ebml_write_buffer (ebml, hdr);
gst_ebml_write_flush_cache (ebml, FALSE, GST_BUFFER_TIMESTAMP (buf));
gst_ebml_write_buffer (ebml, buf);
return gst_ebml_last_write_result (ebml);
} else {
- gst_ebml_write_set_cache (ebml, GST_BUFFER_SIZE (buf) * 2);
+ gst_ebml_write_set_cache (ebml, gst_buffer_get_size (buf) * 2);
/* write and call order slightly unnatural,
* but avoids seek and minizes pushing */
blockgroup = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_BLOCKGROUP);
if (write_duration)
gst_ebml_write_uint (ebml, GST_MATROSKA_ID_BLOCKDURATION, block_duration);
gst_ebml_write_buffer_header (ebml, GST_MATROSKA_ID_BLOCK,
- GST_BUFFER_SIZE (buf) + GST_BUFFER_SIZE (hdr));
+ gst_buffer_get_size (buf) + gst_buffer_get_size (hdr));
gst_ebml_write_buffer (ebml, hdr);
- gst_ebml_write_master_finish_full (ebml, blockgroup, GST_BUFFER_SIZE (buf));
+ gst_ebml_write_master_finish_full (ebml, blockgroup,
+ gst_buffer_get_size (buf));
gst_ebml_write_flush_cache (ebml, FALSE, GST_BUFFER_TIMESTAMP (buf));
gst_ebml_write_buffer (ebml, buf);
GST_DEBUG_OBJECT (mux, "... but streamable, nothing to finish");
}
gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto exit;
}
guint64 pos;
} GstMatroskaMetaSeekIndex;
+typedef gboolean (*GstMatroskaCapsFunc) (GstPad *pad, GstCaps *caps);
+
/* all information needed for one matroska stream */
typedef struct
{
GstCollectData2 collect; /* we extend the CollectData */
+ GstMatroskaCapsFunc capsfunc;
GstMatroskaTrackContext *track;
guint64 duration;
static gboolean gst_matroska_parse_handle_seek_event (GstMatroskaParse * parse,
GstPad * pad, GstEvent * event);
static gboolean gst_matroska_parse_handle_src_event (GstPad * pad,
- GstEvent * event);
-static const GstQueryType *gst_matroska_parse_get_src_query_types (GstPad *
- pad);
+ GstObject * parent, GstEvent * event);
static gboolean gst_matroska_parse_handle_src_query (GstPad * pad,
- GstQuery * query);
+ GstObject * parent, GstQuery * query);
static gboolean gst_matroska_parse_handle_sink_event (GstPad * pad,
- GstEvent * event);
+ GstObject * parent, GstEvent * event);
static GstFlowReturn gst_matroska_parse_chain (GstPad * pad,
- GstBuffer * buffer);
+ GstObject * parent, GstBuffer * buffer);
static GstStateChangeReturn
gst_matroska_parse_change_state (GstElement * element,
GstStateChange transition);
+#if 0
static void
gst_matroska_parse_set_index (GstElement * element, GstIndex * index);
static GstIndex *gst_matroska_parse_get_index (GstElement * element);
+#endif
/* stream methods */
static void gst_matroska_parse_reset (GstElement * element);
guint64 offset);
GType gst_matroska_parse_get_type (void);
-GST_BOILERPLATE (GstMatroskaParse, gst_matroska_parse, GstElement,
- GST_TYPE_ELEMENT);
-
-static void
-gst_matroska_parse_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class, &src_templ);
- gst_element_class_add_static_pad_template (element_class, &sink_templ);
-
- gst_element_class_set_details_simple (element_class, "Matroska parser",
- "Codec/Parser",
- "Parses Matroska/WebM streams into video/audio/subtitles",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-}
+#define parent_class gst_matroska_parse_parent_class
+G_DEFINE_TYPE (GstMatroskaParse, gst_matroska_parse, GST_TYPE_ELEMENT);
static void
gst_matroska_parse_finalize (GObject * object)
gstelement_class->query =
GST_DEBUG_FUNCPTR (gst_matroska_parse_element_query);
+#if 0
gstelement_class->set_index =
GST_DEBUG_FUNCPTR (gst_matroska_parse_set_index);
gstelement_class->get_index =
GST_DEBUG_FUNCPTR (gst_matroska_parse_get_index);
+#endif
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_templ));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "Matroska parser", "Codec/Parser",
+ "Parses Matroska/WebM streams into video/audio/subtitles",
+ "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
}
static void
-gst_matroska_parse_init (GstMatroskaParse * parse,
- GstMatroskaParseClass * klass)
+gst_matroska_parse_init (GstMatroskaParse * parse)
{
parse->common.sinkpad = gst_pad_new_from_static_template (&sink_templ,
"sink");
parse->srcpad = gst_pad_new_from_static_template (&src_templ, "src");
gst_pad_set_event_function (parse->srcpad,
GST_DEBUG_FUNCPTR (gst_matroska_parse_handle_src_event));
- gst_pad_set_query_type_function (parse->srcpad,
- GST_DEBUG_FUNCPTR (gst_matroska_parse_get_src_query_types));
gst_pad_set_query_function (parse->srcpad,
GST_DEBUG_FUNCPTR (gst_matroska_parse_handle_src_query));
gst_pad_use_fixed_caps (parse->srcpad);
parse->common.adapter = gst_adapter_new ();
+ GST_OBJECT_FLAG_SET (parse, GST_ELEMENT_FLAG_INDEXABLE);
+
/* finish off */
gst_matroska_parse_reset (GST_ELEMENT (parse));
}
gst_event_unref (parse->new_segment);
parse->new_segment = NULL;
}
-
+#if 0
if (parse->common.element_index) {
gst_object_unref (parse->common.element_index);
parse->common.element_index = NULL;
}
parse->common.element_index_writer_id = -1;
+#endif
if (parse->common.global_tags) {
gst_tag_list_free (parse->common.global_tags);
}
- parse->common.global_tags = gst_tag_list_new ();
+ parse->common.global_tags = gst_tag_list_new_empty ();
if (parse->common.cached_buffer) {
gst_buffer_unref (parse->common.cached_buffer);
}
if (context->type == 0 || context->codec_id == NULL || (ret != GST_FLOW_OK
- && ret != GST_FLOW_UNEXPECTED)) {
- if (ret == GST_FLOW_OK || ret == GST_FLOW_UNEXPECTED)
+ && ret != GST_FLOW_EOS)) {
+ if (ret == GST_FLOW_OK || ret == GST_FLOW_EOS)
GST_WARNING_OBJECT (ebml, "Unknown stream/codec in track entry header");
parse->common.num_streams--;
return ret;
}
-static const GstQueryType *
-gst_matroska_parse_get_src_query_types (GstPad * pad)
-{
- static const GstQueryType query_types[] = {
- GST_QUERY_POSITION,
- GST_QUERY_DURATION,
- GST_QUERY_SEEKING,
- 0
- };
-
- return query_types;
-}
-
static gboolean
gst_matroska_parse_query (GstMatroskaParse * parse, GstPad * pad,
GstQuery * query)
gst_query_set_position (query, GST_FORMAT_TIME, context->pos);
else
gst_query_set_position (query, GST_FORMAT_TIME,
- parse->common.segment.last_stop);
+ parse->common.segment.position);
GST_OBJECT_UNLOCK (parse);
} else if (format == GST_FORMAT_DEFAULT && context
&& context->default_duration) {
break;
}
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, (GstObject *) parse, query);
break;
}
}
static gboolean
-gst_matroska_parse_handle_src_query (GstPad * pad, GstQuery * query)
+gst_matroska_parse_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
gboolean ret;
- GstMatroskaParse *parse = GST_MATROSKA_PARSE (gst_pad_get_parent (pad));
+ GstMatroskaParse *parse = GST_MATROSKA_PARSE (parent);
ret = gst_matroska_parse_query (parse, pad, query);
- gst_object_unref (parse);
-
return ret;
}
GstFlowReturn ret = GST_FLOW_OK;
const guint chunk = 64 * 1024;
GstBuffer *buf = NULL;
+ GstMapInfo map;
+ gpointer data;
+ gsize size;
guint64 length;
guint32 id;
guint needed;
ret = gst_pad_pull_range (parse->common.sinkpad, newpos, chunk, &buf);
if (ret != GST_FLOW_OK)
break;
- GST_DEBUG_OBJECT (parse, "read buffer size %d at offset %" G_GINT64_FORMAT,
- GST_BUFFER_SIZE (buf), newpos);
- gst_byte_reader_init_from_buffer (&reader, buf);
+ GST_DEBUG_OBJECT (parse,
+ "read buffer size %" G_GSIZE_FORMAT " at offset %" G_GINT64_FORMAT,
+ gst_buffer_get_size (buf), newpos);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+ gst_byte_reader_init (&reader, data, size);
cluster_pos = 0;
resume:
cluster_pos = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
- GST_MATROSKA_ID_CLUSTER, cluster_pos,
- GST_BUFFER_SIZE (buf) - cluster_pos);
+ GST_MATROSKA_ID_CLUSTER, cluster_pos, size - cluster_pos);
if (cluster_pos >= 0) {
newpos += cluster_pos;
GST_DEBUG_OBJECT (parse,
goto resume;
} else {
/* partial cluster id may have been in tail of buffer */
- newpos += MAX (GST_BUFFER_SIZE (buf), 4) - 3;
+ newpos += MAX (size, 4) - 3;
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
buf = NULL;
}
}
if (buf) {
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
buf = NULL;
}
if (event) {
GST_DEBUG_OBJECT (parse, "configuring seek");
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
}
/* check sanity before we start flushing and all that */
GST_OBJECT_LOCK (parse);
if ((entry = gst_matroska_read_common_do_index_seek (&parse->common, track,
- seeksegment.last_stop, &parse->seek_index, &parse->seek_entry)) ==
+ seeksegment.position, &parse->seek_index, &parse->seek_entry)) ==
NULL) {
/* pull mode without index can scan later on */
GST_DEBUG_OBJECT (parse, "No matching seek entry in index");
}
static gboolean
-gst_matroska_parse_handle_src_event (GstPad * pad, GstEvent * event)
+gst_matroska_parse_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
- GstMatroskaParse *parse = GST_MATROSKA_PARSE (gst_pad_get_parent (pad));
+ GstMatroskaParse *parse = GST_MATROSKA_PARSE (parent);
gboolean res = TRUE;
switch (GST_EVENT_TYPE (event)) {
GstClockTimeDiff diff;
GstClockTime timestamp;
- gst_event_parse_qos (event, &proportion, &diff, ×tamp);
+ gst_event_parse_qos (event, NULL, &proportion, &diff, ×tamp);
GST_OBJECT_LOCK (parse);
videocontext->earliest_time = timestamp + diff;
break;
}
- gst_object_unref (parse);
-
return res;
}
guint32 id;
guint64 block_duration = 0;
GstBuffer *buf = NULL;
+ GstMapInfo map;
gint stream_num = -1, n, laces = 0;
guint size = 0;
gint *lace_size = NULL;
if ((ret = gst_ebml_read_buffer (ebml, &id, &buf)) != GST_FLOW_OK)
break;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
/* first byte(s): blocknum */
if ((n = gst_matroska_ebmlnum_uint (data, size, &num)) < 0)
}
/* need to refresh segment info ASAP */
if (GST_CLOCK_TIME_IS_VALID (lace_time) && parse->need_newsegment) {
+ GstSegment segment;
GST_DEBUG_OBJECT (parse,
"generating segment starting at %" GST_TIME_FORMAT,
GST_TIME_ARGS (lace_time));
/* pretend we seeked here */
- gst_segment_set_seek (&parse->common.segment, parse->common.segment.rate,
+ gst_segment_do_seek (&parse->common.segment, parse->common.segment.rate,
GST_FORMAT_TIME, 0, GST_SEEK_TYPE_SET, lace_time,
GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE, NULL);
/* now convey our segment notion downstream */
- gst_matroska_parse_send_event (parse, gst_event_new_new_segment (FALSE,
- parse->common.segment.rate, parse->common.segment.format,
- parse->common.segment.start, parse->common.segment.stop,
- parse->common.segment.start));
+ segment = parse->common.segment;
+ segment.position = segment.start;
+ gst_matroska_parse_send_event (parse, gst_event_new_segment (&segment));
parse->need_newsegment = FALSE;
}
ret = gst_pad_push (stream->pad, sub);
if (parse->segment.rate < 0) {
- if (lace_time > parse->segment.stop && ret == GST_FLOW_UNEXPECTED) {
- /* In reverse playback we can get a GST_FLOW_UNEXPECTED when
+ if (lace_time > parse->segment.stop && ret == GST_FLOW_EOS) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
* we are at the end of the segment, so we just need to jump
* back to the previous section. */
GST_DEBUG_OBJECT (parse, "downstream has reached end of segment");
}
done:
- if (buf)
+ if (buf) {
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
+ }
g_free (lace_size);
return ret;
}
}
- if (ret != GST_FLOW_OK && ret != GST_FLOW_UNEXPECTED)
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
return ret;
if (!seek_id || seek_pos == (guint64) - 1) {
}
/* initializes @ebml with @bytes from input stream at current offset.
- * Returns UNEXPECTED if insufficient available,
+ * Returns EOS if insufficient available,
* ERROR if too much was attempted to read. */
static inline GstFlowReturn
gst_matroska_parse_take (GstMatroskaParse * parse, guint64 bytes,
if (gst_adapter_available (parse->common.adapter) >= bytes)
buffer = gst_adapter_take_buffer (parse->common.adapter, bytes);
else
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
if (G_LIKELY (buffer)) {
gst_ebml_read_init (ebml, GST_ELEMENT_CAST (parse), buffer,
parse->common.offset);
/* try harder to query upstream size if we didn't get it the first time */
if (seekable && stop == -1) {
- GstFormat fmt = GST_FORMAT_BYTES;
-
GST_DEBUG_OBJECT (parse, "doing duration query to fix up unset stop");
- gst_pad_query_peer_duration (parse->common.sinkpad, &fmt, &stop);
+ gst_pad_peer_query_duration (parse->common.sinkpad, GST_FORMAT_BYTES,
+ &stop);
}
/* if upstream doesn't know the size, it's likely that it's not seekable in
GstBuffer *buf;
buf = gst_buffer_span (parse->streamheader, 0, buffer,
- GST_BUFFER_SIZE (parse->streamheader) + GST_BUFFER_SIZE (buffer));
+ gst_buffer_get_size (parse->streamheader) +
+ gst_buffer_get_size (buffer));
gst_buffer_unref (parse->streamheader);
parse->streamheader = buf;
} else {
parse->streamheader = gst_buffer_ref (buffer);
}
- GST_DEBUG ("%d", GST_BUFFER_SIZE (parse->streamheader));
+ GST_DEBUG ("%" G_GSIZE_FORMAT, gst_buffer_get_size (parse->streamheader));
}
static GstFlowReturn
GValue bufval = { 0 };
GstBuffer *buf;
- caps = gst_caps_new_simple ("video/x-matroska", NULL);
+ caps = gst_caps_new_empty_simple ("video/x-matroska");
s = gst_caps_get_structure (caps, 0);
g_value_init (&streamheader, GST_TYPE_ARRAY);
g_value_init (&bufval, GST_TYPE_BUFFER);
buf = gst_buffer_copy (parse->streamheader);
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
gst_value_set_buffer (&bufval, buf);
gst_buffer_unref (buf);
gst_value_array_append_value (&streamheader, &bufval);
gst_pad_set_caps (parse->srcpad, caps);
buf = gst_buffer_copy (parse->streamheader);
- gst_buffer_set_caps (buf, caps);
gst_caps_unref (caps);
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
ret = gst_pad_push (parse->srcpad, buf);
} else {
GST_BUFFER_TIMESTAMP (buffer) = parse->last_timestamp;
}
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (parse->srcpad));
ret = gst_pad_push (parse->srcpad, gst_buffer_ref (buffer));
return ret;
buffer = gst_adapter_take_buffer (parse->adapter, length + needed);
gst_pad_push (parse->srcpad, buffer);
} else {
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
}
//GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
goto parse_failed;
GST_DEBUG_OBJECT (parse, "ClusterTimeCode: %" G_GUINT64_FORMAT, num);
parse->cluster_time = num;
+#if 0
if (parse->common.element_index) {
if (parse->common.element_index_writer_id == -1)
gst_index_get_writer_id (parse->common.element_index,
GST_FORMAT_TIME, parse->cluster_time,
GST_FORMAT_BYTES, parse->cluster_offset, NULL);
}
+#endif
gst_matroska_parse_output (parse, ebml.buf, FALSE);
break;
}
ret = gst_matroska_read_common_peek_id_length_pull (&parse->common,
GST_ELEMENT_CAST (parse), &id, &length, &needed);
- if (ret == GST_FLOW_UNEXPECTED)
+ if (ret == GST_FLOW_EOS)
goto eos;
if (ret != GST_FLOW_OK) {
if (gst_matroska_parse_check_parse_error (parse))
length, needed);
ret = gst_matroska_parse_parse_id (parse, id, length, needed);
- if (ret == GST_FLOW_UNEXPECTED)
+ if (ret == GST_FLOW_EOS)
goto eos;
if (ret != GST_FLOW_OK)
goto pause;
}
GST_INFO_OBJECT (parse, "All streams are EOS");
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto eos;
}
if (G_UNLIKELY (parse->offset ==
gst_matroska_read_common_get_length (&parse->common))) {
GST_LOG_OBJECT (parse, "Reached end of stream");
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto eos;
}
parse->segment_running = FALSE;
gst_pad_pause_task (parse->common.sinkpad);
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
/* perform EOS logic */
/* Close the segment, i.e. update segment stop with the duration
} else {
push_eos = TRUE;
}
- } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
/* for fatal errors we post an error message */
GST_ELEMENT_ERROR (parse, STREAM, FAILED, (NULL),
("stream stopped, reason %s", reason));
/* send EOS, and prevent hanging if no streams yet */
GST_LOG_OBJECT (parse, "Sending EOS, at end of stream");
if (!gst_matroska_parse_send_event (parse, gst_event_new_eos ()) &&
- (ret == GST_FLOW_UNEXPECTED)) {
+ (ret == GST_FLOW_EOS)) {
GST_ELEMENT_ERROR (parse, STREAM, DEMUX,
(NULL), ("got eos but no streams (yet)"));
}
}
static GstFlowReturn
-gst_matroska_parse_chain (GstPad * pad, GstBuffer * buffer)
+gst_matroska_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
- GstMatroskaParse *parse = GST_MATROSKA_PARSE (GST_PAD_PARENT (pad));
+ GstMatroskaParse *parse = GST_MATROSKA_PARSE (parent);
guint available;
GstFlowReturn ret = GST_FLOW_OK;
guint needed = 0;
ret = gst_matroska_read_common_peek_id_length_push (&parse->common,
GST_ELEMENT_CAST (parse), &id, &length, &needed);
- if (G_UNLIKELY (ret != GST_FLOW_OK && ret != GST_FLOW_UNEXPECTED))
+ if (G_UNLIKELY (ret != GST_FLOW_OK && ret != GST_FLOW_EOS))
return ret;
GST_LOG_OBJECT (parse, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
return GST_FLOW_OK;
ret = gst_matroska_parse_parse_id (parse, id, length, needed);
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
/* need more data */
return GST_FLOW_OK;
} else if (ret != GST_FLOW_OK) {
}
static gboolean
-gst_matroska_parse_handle_sink_event (GstPad * pad, GstEvent * event)
+gst_matroska_parse_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
gboolean res = TRUE;
GstMatroskaParse *parse = GST_MATROSKA_PARSE (GST_PAD_PARENT (pad));
"have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event);
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time = 0;
- gboolean update;
- GstSegment segment;
+ const GstSegment *segment;
/* some debug output */
- gst_segment_init (&segment, GST_FORMAT_UNDEFINED);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
- gst_segment_set_newsegment_full (&segment, update, rate, arate, format,
- start, stop, time);
+ gst_event_parse_segment (event, &segment);
GST_DEBUG_OBJECT (parse,
- "received format %d newsegment %" GST_SEGMENT_FORMAT, format,
- &segment);
+ "received format %d newsegment %" GST_SEGMENT_FORMAT,
+ segment->format, segment);
if (parse->common.state < GST_MATROSKA_READ_STATE_DATA) {
GST_DEBUG_OBJECT (parse, "still starting");
}
/* we only expect a BYTE segment, e.g. following a seek */
- if (format != GST_FORMAT_BYTES) {
+ if (segment->format != GST_FORMAT_BYTES) {
GST_DEBUG_OBJECT (parse, "unsupported segment format, ignoring");
goto exit;
}
/* clear current segment leftover */
gst_adapter_clear (parse->common.adapter);
/* and some streaming setup */
- parse->common.offset = start;
+ parse->common.offset = segment->start;
/* do not know where we are;
* need to come across a cluster and generate newsegment */
- parse->common.segment.last_stop = GST_CLOCK_TIME_NONE;
+ parse->common.segment.position = GST_CLOCK_TIME_NONE;
parse->cluster_time = GST_CLOCK_TIME_NONE;
parse->cluster_offset = 0;
parse->need_newsegment = TRUE;
/* but keep some of the upstream segment */
- parse->common.segment.rate = rate;
+ parse->common.segment.rate = segment->rate;
exit:
/* chain will send initial newsegment after pads have been added,
* or otherwise come up with one */
gst_matroska_read_common_reset_streams (&parse->common,
GST_CLOCK_TIME_NONE, TRUE);
GST_OBJECT_UNLOCK (parse);
- parse->common.segment.last_stop = GST_CLOCK_TIME_NONE;
+ parse->common.segment.position = GST_CLOCK_TIME_NONE;
parse->cluster_time = GST_CLOCK_TIME_NONE;
parse->cluster_offset = 0;
/* fall-through */
}
default:
- res = gst_pad_event_default (pad, event);
+ res = gst_pad_event_default (pad, parent, event);
break;
}
return res;
}
+#if 0
static void
gst_matroska_parse_set_index (GstElement * element, GstIndex * index)
{
return result;
}
+#endif
static GstStateChangeReturn
gst_matroska_parse_change_state (GstElement * element,
static gboolean
gst_matroska_decompress_data (GstMatroskaTrackEncoding * enc,
- guint8 ** data_out, guint * size_out,
+ gpointer * data_out, gsize * size_out,
GstMatroskaTrackCompressionAlgorithm algo)
{
guint8 *new_data = NULL;
for (i = 0; i < encodings->len; i++) {
GstMatroskaTrackEncoding *enc =
&g_array_index (encodings, GstMatroskaTrackEncoding, i);
- guint8 *data = NULL;
- guint size;
+ gpointer data = NULL;
+ gsize size;
if ((enc->scope & GST_MATROSKA_TRACK_ENCODING_SCOPE_NEXT_CONTENT_ENCODING)
== 0)
}
gboolean
-gst_matroska_decode_data (GArray * encodings, guint8 ** data_out,
- guint * size_out, GstMatroskaTrackEncodingScope scope, gboolean free)
+gst_matroska_decode_data (GArray * encodings, gpointer * data_out,
+ gsize * size_out, GstMatroskaTrackEncodingScope scope, gboolean free)
{
- guint8 *data;
- guint size;
+ gpointer data;
+ gsize size;
gboolean ret = TRUE;
gint i;
for (i = 0; i < encodings->len; i++) {
GstMatroskaTrackEncoding *enc =
&g_array_index (encodings, GstMatroskaTrackEncoding, i);
- guint8 *new_data = NULL;
- guint new_size = 0;
+ gpointer new_data = NULL;
+ gsize new_size = 0;
if ((enc->scope & scope) == 0)
continue;
gst_tag_list_insert (common->global_tags, taglist, GST_TAG_MERGE_APPEND);
gst_tag_list_free (taglist);
} else {
+ GstEvent *tag_event = gst_event_new_tag (taglist);
+ gint i;
+
/* hm, already sent, no need to cache and wait anymore */
GST_DEBUG_OBJECT (common, "Sending late global tags %" GST_PTR_FORMAT,
taglist);
- gst_element_found_tags (el, taglist);
+
+ for (i = 0; i < common->src->len; i++) {
+ GstMatroskaTrackContext *stream;
+
+ stream = g_ptr_array_index (common->src, i);
+ gst_pad_push_event (stream->pad, gst_event_ref (tag_event));
+ }
+
+ gst_event_unref (tag_event);
}
}
gint64
gst_matroska_read_common_get_length (GstMatroskaReadCommon * common)
{
- GstFormat fmt = GST_FORMAT_BYTES;
gint64 end = -1;
- if (!gst_pad_query_peer_duration (common->sinkpad, &fmt, &end) ||
- fmt != GST_FORMAT_BYTES || end < 0)
+ if (!gst_pad_peer_query_duration (common->sinkpad, GST_FORMAT_BYTES,
+ &end) || end < 0)
GST_DEBUG_OBJECT (common, "no upstream length");
return end;
if (filename && mimetype && data && datalen > 0) {
GstTagImageType image_type = GST_TAG_IMAGE_TYPE_NONE;
GstBuffer *tagbuffer = NULL;
- GstCaps *caps;
+ GstSample *tagsample = NULL;
+ GstStructure *info = NULL;
+ GstCaps *caps = NULL;
gchar *filename_lc = g_utf8_strdown (filename, -1);
GST_DEBUG_OBJECT (common, "Creating tag for attachment with "
/* First try to create an image tag buffer from this */
if (image_type != GST_TAG_IMAGE_TYPE_NONE) {
- tagbuffer =
- gst_tag_image_data_to_image_buffer (data, datalen, image_type);
+ tagsample =
+ gst_tag_image_data_to_image_sample (data, datalen, image_type);
- if (!tagbuffer)
+ if (!tagsample)
image_type = GST_TAG_IMAGE_TYPE_NONE;
+ else {
+ data = NULL;
+ tagbuffer = gst_buffer_ref (gst_sample_get_buffer (tagsample));
+ caps = gst_caps_ref (gst_sample_get_caps (tagsample));
+ info = gst_structure_copy (gst_sample_get_info (tagsample));
+ gst_sample_unref (tagsample);
+ }
}
/* if this failed create an attachment buffer */
if (!tagbuffer) {
- tagbuffer = gst_buffer_new_and_alloc (datalen);
-
- memcpy (GST_BUFFER_DATA (tagbuffer), data, datalen);
- GST_BUFFER_SIZE (tagbuffer) = datalen;
+ tagbuffer = gst_buffer_new_wrapped (g_memdup (data, datalen), datalen);
caps = gst_type_find_helper_for_buffer (NULL, tagbuffer, NULL);
if (caps == NULL)
- caps = gst_caps_new_simple (mimetype, NULL);
- gst_buffer_set_caps (tagbuffer, caps);
- gst_caps_unref (caps);
+ caps = gst_caps_new_empty_simple (mimetype);
}
- /* Set filename and description on the caps */
- caps = GST_BUFFER_CAPS (tagbuffer);
- gst_caps_set_simple (caps, "filename", G_TYPE_STRING, filename, NULL);
+ /* Set filename and description in the info */
+ if (info == NULL)
+ info = gst_structure_new_empty ("GstTagImageInfo");
+
+ gst_structure_set (info, "filename", G_TYPE_STRING, filename, NULL);
if (description)
- gst_caps_set_simple (caps, "description", G_TYPE_STRING, description,
- NULL);
+ gst_structure_set (info, "description", G_TYPE_STRING, description, NULL);
+
+ tagsample = gst_sample_new (tagbuffer, caps, NULL, info);
GST_DEBUG_OBJECT (common,
- "Created attachment buffer with caps: %" GST_PTR_FORMAT, caps);
+ "Created attachment sample: %" GST_PTR_FORMAT, tagsample);
/* and append to the tag list */
if (image_type != GST_TAG_IMAGE_TYPE_NONE)
- gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, GST_TAG_IMAGE, tagbuffer,
+ gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, GST_TAG_IMAGE, tagsample,
NULL);
else
gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, GST_TAG_ATTACHMENT,
- tagbuffer, NULL);
+ tagsample, NULL);
- /* the tag list adds it own ref */
- gst_buffer_unref (tagbuffer);
+ /* the list adds it own ref */
+ gst_sample_unref (tagsample);
}
g_free (filename);
return ret;
}
- taglist = gst_tag_list_new ();
+ taglist = gst_tag_list_new_empty ();
while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
}
}
- if ((ret == GST_FLOW_OK || ret == GST_FLOW_UNEXPECTED)
+ if ((ret == GST_FLOW_OK || ret == GST_FLOW_EOS)
&& idx.pos != (guint64) - 1 && idx.track > 0) {
g_array_append_val (common->index, idx);
(*nentries)++;
- } else if (ret == GST_FLOW_OK || ret == GST_FLOW_UNEXPECTED) {
+ } else if (ret == GST_FLOW_OK || ret == GST_FLOW_EOS) {
GST_DEBUG_OBJECT (common, "CueTrackPositions without valid content");
}
gint track_num;
GstMatroskaTrackContext *ctx;
+#if 0
if (common->element_index) {
gint writer_id;
GST_ASSOCIATION_FLAG_KEY_UNIT, GST_FORMAT_TIME, idx->time,
GST_FORMAT_BYTES, idx->pos + common->ebml_segment_start, NULL);
}
+#endif
if (idx->track == 0)
continue;
break;
GST_DEBUG_OBJECT (common, "Title: %s", GST_STR_NULL (text));
- taglist = gst_tag_list_new ();
- gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, text,
- NULL);
+ taglist = gst_tag_list_new (GST_TAG_TITLE, text, NULL);
gst_matroska_read_common_found_global_tag (common, el, taglist);
g_free (text);
break;
dur_u = gst_gdouble_to_guint64 (dur_f *
gst_guint64_to_gdouble (common->time_scale));
if (GST_CLOCK_TIME_IS_VALID (dur_u) && dur_u <= G_MAXINT64)
- gst_segment_set_duration (&common->segment, GST_FORMAT_TIME, dur_u);
+ common->segment.duration = dur_u;
}
DEBUG_ELEMENT_STOP (common, ebml, "SegmentInfo", ret);
GType dest_type = gst_tag_get_type (tagname_gst);
/* Ensure that any date string is complete */
- if (dest_type == GST_TYPE_DATE) {
+ if (dest_type == G_TYPE_DATE) {
guint year = 1901, month = 1, day = 1;
/* Dates can be yyyy-MM-dd, yyyy-MM or yyyy, but we need
return ret;
}
- taglist = gst_tag_list_new ();
+ taglist = gst_tag_list_new_empty ();
while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
gst_matroska_read_common_peek_adapter (GstMatroskaReadCommon * common, guint
peek, const guint8 ** data)
{
- *data = gst_adapter_peek (common->adapter, peek);
+ /* Caller needs to gst_adapter_unmap. */
+ *data = gst_adapter_map (common->adapter, peek);
if (*data == NULL)
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
return GST_FLOW_OK;
}
* We do it mainly to avoid pulling buffers of 1 byte all the time */
if (common->cached_buffer) {
guint64 cache_offset = GST_BUFFER_OFFSET (common->cached_buffer);
- guint cache_size = GST_BUFFER_SIZE (common->cached_buffer);
+ gsize cache_size = gst_buffer_get_size (common->cached_buffer);
if (cache_offset <= common->offset &&
(common->offset + size) <= (cache_offset + cache_size)) {
if (p_buf)
- *p_buf = gst_buffer_create_sub (common->cached_buffer,
- common->offset - cache_offset, size);
- if (bytes)
- *bytes = GST_BUFFER_DATA (common->cached_buffer) + common->offset -
- cache_offset;
+ *p_buf = gst_buffer_copy_region (common->cached_buffer,
+ GST_BUFFER_COPY_ALL, common->offset - cache_offset, size);
+ if (bytes) {
+ if (!common->cached_data) {
+ gst_buffer_map (common->cached_buffer, &common->cached_map,
+ GST_MAP_READ);
+ common->cached_data = common->cached_map.data;
+ }
+ *bytes = common->cached_data + common->offset - cache_offset;
+ }
return GST_FLOW_OK;
}
/* not enough data in the cache, free cache and get a new one */
+ if (common->cached_data) {
+ gst_buffer_unmap (common->cached_buffer, &common->cached_map);
+ common->cached_data = NULL;
+ }
gst_buffer_unref (common->cached_buffer);
common->cached_buffer = NULL;
}
return ret;
}
- if (GST_BUFFER_SIZE (common->cached_buffer) >= size) {
+ if (gst_buffer_get_size (common->cached_buffer) >= size) {
if (p_buf)
- *p_buf = gst_buffer_create_sub (common->cached_buffer, 0, size);
- if (bytes)
- *bytes = GST_BUFFER_DATA (common->cached_buffer);
+ *p_buf = gst_buffer_copy_region (common->cached_buffer,
+ GST_BUFFER_COPY_ALL, 0, size);
+ if (bytes) {
+ gst_buffer_map (common->cached_buffer, &common->cached_map, GST_MAP_READ);
+ common->cached_data = common->cached_map.data;
+ *bytes = common->cached_data;
+ }
return GST_FLOW_OK;
}
return ret;
}
- if (GST_BUFFER_SIZE (common->cached_buffer) < size) {
+ if (gst_buffer_get_size (common->cached_buffer) < size) {
GST_WARNING_OBJECT (common, "Dropping short buffer at offset %"
- G_GUINT64_FORMAT ": wanted %u bytes, got %u bytes", common->offset,
- size, GST_BUFFER_SIZE (common->cached_buffer));
+ G_GUINT64_FORMAT ": wanted %u bytes, got %" G_GSIZE_FORMAT " bytes",
+ common->offset, size, gst_buffer_get_size (common->cached_buffer));
gst_buffer_unref (common->cached_buffer);
common->cached_buffer = NULL;
*p_buf = NULL;
if (bytes)
*bytes = NULL;
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
if (p_buf)
- *p_buf = gst_buffer_create_sub (common->cached_buffer, 0, size);
- if (bytes)
- *bytes = GST_BUFFER_DATA (common->cached_buffer);
+ *p_buf = gst_buffer_copy_region (common->cached_buffer,
+ GST_BUFFER_COPY_ALL, 0, size);
+ if (bytes) {
+ gst_buffer_map (common->cached_buffer, &common->cached_map, GST_MAP_READ);
+ common->cached_data = common->cached_map.data;
+ *bytes = common->cached_data;
+ }
return GST_FLOW_OK;
}
gst_matroska_read_common_peek_id_length_push (GstMatroskaReadCommon * common,
GstElement * el, guint32 * _id, guint64 * _length, guint * _needed)
{
- return gst_ebml_peek_id_length (_id, _length, _needed,
+ GstFlowReturn ret;
+
+ ret = gst_ebml_peek_id_length (_id, _length, _needed,
(GstPeekData) gst_matroska_read_common_peek_adapter, (gpointer) common,
el, common->offset);
+
+ gst_adapter_unmap (common->adapter);
+
+ return ret;
}
static GstFlowReturn
}
DEBUG_ELEMENT_STOP (common, ebml, "ContentEncoding", ret);
- if (ret != GST_FLOW_OK && ret != GST_FLOW_UNEXPECTED)
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
return ret;
/* TODO: Check if the combination of values is valid */
}
DEBUG_ELEMENT_STOP (common, ebml, "ContentEncodings", ret);
- if (ret != GST_FLOW_OK && ret != GST_FLOW_UNEXPECTED)
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
return ret;
/* Sort encodings according to their order */
} GstMatroskaReadState;
typedef struct _GstMatroskaReadCommon {
+#if 0
GstIndex *element_index;
gint element_index_writer_id;
+#endif
/* pads */
GstPad *sinkpad;
/* pull mode caching */
GstBuffer *cached_buffer;
+ guint8 *cached_data;
+ GstMapInfo cached_map;
/* push and pull mode */
guint64 offset;
} GstMatroskaReadCommon;
GstFlowReturn gst_matroska_decode_content_encodings (GArray * encodings);
-gboolean gst_matroska_decode_data (GArray * encodings, guint8 ** data_out,
- guint * size_out, GstMatroskaTrackEncodingScope scope, gboolean free);
+gboolean gst_matroska_decode_data (GArray * encodings, gpointer * data_out,
+ gsize * size_out, GstMatroskaTrackEncodingScope scope, gboolean free);
gint gst_matroska_index_seek_find (GstMatroskaIndex * i1, GstClockTime * time,
gpointer user_data);
GstMatroskaIndex * gst_matroska_read_common_do_index_seek (
* ]| This pipeline re-encodes a video file of any format into a WebM file.
* |[
* gst-launch-0.10 webmmux name=mux ! filesink location=test.webm \
- * videotestsrc num-buffers=250 ! video/x-raw-yuv,framerate=25/1 ! ffmpegcolorspace ! vp8enc ! queue ! mux.video_0 \
- * audiotestsrc samplesperbuffer=44100 num-buffers=10 ! audio/x-raw-float,rate=44100 ! vorbisenc ! queue ! mux.audio_0
+ * videotestsrc num-buffers=250 ! video/x-raw,framerate=25/1 ! ffmpegcolorspace ! vp8enc ! queue ! mux.video_0 \
+ * audiotestsrc samplesperbuffer=44100 num-buffers=10 ! audio/x-raw,rate=44100 ! vorbisenc ! queue ! mux.audio_0
* ]| This pipeline muxes a test video and a sine wave into a WebM file.
* </refsect2>
*/
"channels = (int) [ 1, MAX ], " \
"rate = (int) [ 1, MAX ]"
-GST_BOILERPLATE (GstWebMMux, gst_webm_mux, GstMatroskaMux,
- GST_TYPE_MATROSKA_MUX);
+G_DEFINE_TYPE (GstWebMMux, gst_webm_mux, GST_TYPE_MATROSKA_MUX);
static GstStaticPadTemplate webm_src_templ = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
);
static GstStaticPadTemplate webm_videosink_templ =
-GST_STATIC_PAD_TEMPLATE ("video_%d",
+GST_STATIC_PAD_TEMPLATE ("video_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("video/x-vp8, " COMMON_VIDEO_CAPS)
);
static GstStaticPadTemplate webm_audiosink_templ =
-GST_STATIC_PAD_TEMPLATE ("audio_%d",
+GST_STATIC_PAD_TEMPLATE ("audio_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("audio/x-vorbis, " COMMON_AUDIO_CAPS)
);
static void
-gst_webm_mux_base_init (gpointer g_class)
-{
-}
-
-static void
gst_webm_mux_class_init (GstWebMMuxClass * klass)
{
GstElementClass *gstelement_class = (GstElementClass *) klass;
- gst_element_class_add_static_pad_template (gstelement_class,
- &webm_videosink_templ);
- gst_element_class_add_static_pad_template (gstelement_class,
- &webm_audiosink_templ);
- gst_element_class_add_static_pad_template (gstelement_class,
- &webm_src_templ);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&webm_videosink_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&webm_audiosink_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&webm_src_templ));
gst_element_class_set_details_simple (gstelement_class, "WebM muxer",
"Codec/Muxer",
"Muxes video and audio streams into a WebM stream",
}
static void
-gst_webm_mux_init (GstWebMMux * mux, GstWebMMuxClass * g_class)
+gst_webm_mux_init (GstWebMMux * mux)
{
GST_MATROSKA_MUX (mux)->doctype = GST_MATROSKA_DOCTYPE_WEBM;
}
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class,
- &median_sink_factory);
- gst_element_class_add_static_pad_template (element_class,
- &median_src_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&median_sink_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&median_src_factory));
gst_element_class_set_details_simple (element_class, "Median effect",
"Filter/Effect/Video",
"Apply a median filter to an image",
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class, &src_template);
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
gst_element_class_set_details_simple (element_class, "Monoscope",
"Visualization",
"Displays a highly stabilised waveform of audio input",
return multi_file_sync_next_type;
}
-GST_BOILERPLATE (GstMultiFileSink, gst_multi_file_sink, GstBaseSink,
- GST_TYPE_BASE_SINK);
-
-static void
-gst_multi_file_sink_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- GST_DEBUG_CATEGORY_INIT (gst_multi_file_sink_debug, "multifilesink", 0,
- "multifilesink element");
-
- gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
- gst_element_class_set_details_simple (gstelement_class, "Multi-File Sink",
- "Sink/File",
- "Write buffers to a sequentially named set of files",
- "David Schleef <ds@schleef.org>");
-}
+#define gst_multi_file_sink_parent_class parent_class
+G_DEFINE_TYPE (GstMultiFileSink, gst_multi_file_sink, GST_TYPE_BASE_SINK);
static void
gst_multi_file_sink_class_init (GstMultiFileSinkClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstBaseSinkClass *gstbasesink_class = GST_BASE_SINK_CLASS (klass);
gobject_class->set_property = gst_multi_file_sink_set_property;
gstbasesink_class->set_caps =
GST_DEBUG_FUNCPTR (gst_multi_file_sink_set_caps);
gstbasesink_class->event = GST_DEBUG_FUNCPTR (gst_multi_file_sink_event);
+
+ GST_DEBUG_CATEGORY_INIT (gst_multi_file_sink_debug, "multifilesink", 0,
+ "multifilesink element");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sinktemplate));
+ gst_element_class_set_details_simple (gstelement_class, "Multi-File Sink",
+ "Sink/File",
+ "Write buffers to a sequentially named set of files",
+ "David Schleef <ds@schleef.org>");
}
static void
-gst_multi_file_sink_init (GstMultiFileSink * multifilesink,
- GstMultiFileSinkClass * g_class)
+gst_multi_file_sink_init (GstMultiFileSink * multifilesink)
{
multifilesink->filename = g_strdup (DEFAULT_LOCATION);
multifilesink->index = DEFAULT_INDEX;
for (i = 0; i < sink->n_streamheaders; i++) {
GstBuffer *hdr;
+ GstMapInfo map;
int ret;
hdr = sink->streamheaders[i];
-
- ret = fwrite (GST_BUFFER_DATA (hdr), GST_BUFFER_SIZE (hdr), 1, sink->file);
+ gst_buffer_map (hdr, &map, GST_MAP_READ);
+ ret = fwrite (map.data, map.size, 1, sink->file);
+ gst_buffer_unmap (hdr, &map);
if (ret != 1)
return FALSE;
- sink->cur_file_size += GST_BUFFER_SIZE (hdr);
+ sink->cur_file_size += map.size;
}
return TRUE;
gst_multi_file_sink_render (GstBaseSink * sink, GstBuffer * buffer)
{
GstMultiFileSink *multifilesink;
- guint size;
- guint8 *data;
+ GstMapInfo map;
gchar *filename;
gboolean ret;
GError *error = NULL;
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
multifilesink = GST_MULTI_FILE_SINK (sink);
filename = g_strdup_printf (multifilesink->filename,
multifilesink->index);
- ret = g_file_set_contents (filename, (char *) data, size, &error);
+ ret = g_file_set_contents (filename, (char *) map.data, map.size, &error);
if (!ret)
goto write_error;
goto stdio_write_error;
}
- ret = fwrite (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 1,
- multifilesink->file);
+ ret = fwrite (map.data, map.size, 1, multifilesink->file);
if (ret != 1)
goto stdio_write_error;
gst_multi_file_sink_write_stream_headers (multifilesink);
}
- ret = fwrite (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 1,
- multifilesink->file);
+ ret = fwrite (map.data, map.size, 1, multifilesink->file);
if (ret != 1)
goto stdio_write_error;
goto stdio_write_error;
}
- ret = fwrite (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 1,
- multifilesink->file);
+ if (!gst_multi_file_sink_write_stream_headers (multifilesink))
+ goto stdio_write_error;
+
+ ret = fwrite (map.data, map.size, 1, multifilesink->file);
+
if (ret != 1)
goto stdio_write_error;
case GST_MULTI_FILE_SINK_NEXT_MAX_SIZE:{
guint64 new_size;
- new_size = multifilesink->cur_file_size + GST_BUFFER_SIZE (buffer);
+ new_size = multifilesink->cur_file_size + map.size;
if (new_size > multifilesink->max_file_size) {
GST_INFO_OBJECT (multifilesink, "current size: %" G_GUINT64_FORMAT
gst_multi_file_sink_write_stream_headers (multifilesink);
}
- ret = fwrite (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 1,
- multifilesink->file);
+ ret = fwrite (map.data, map.size, 1, multifilesink->file);
if (ret != 1)
goto stdio_write_error;
- multifilesink->cur_file_size += GST_BUFFER_SIZE (buffer);
+ multifilesink->cur_file_size += map.size;
break;
}
default:
g_assert_not_reached ();
}
+ gst_buffer_unmap (buffer, &map);
return GST_FLOW_OK;
/* ERRORS */
g_error_free (error);
g_free (filename);
+ gst_buffer_unmap (buffer, &map);
return GST_FLOW_ERROR;
}
stdio_write_error:
GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
("Error while writing to file."), ("%s", g_strerror (errno)));
}
+ gst_buffer_unmap (buffer, &map);
return GST_FLOW_ERROR;
}
-static GstBufferListItem
-buffer_list_calc_size (GstBuffer ** buf, guint group, guint idx, gpointer data)
+static gboolean
+buffer_list_calc_size (GstBuffer ** buf, guint idx, gpointer data)
{
guint *p_size = data;
- guint buf_size;
+ gsize buf_size;
- buf_size = GST_BUFFER_SIZE (*buf);
- GST_TRACE ("buffer %u in group %u has size %u", idx, group, buf_size);
+ buf_size = gst_buffer_get_size (*buf);
+ GST_TRACE ("buffer %u has size %" G_GSIZE_FORMAT, idx, buf_size);
*p_size += buf_size;
- return GST_BUFFER_LIST_CONTINUE;
+ return TRUE;
}
-static GstBufferListItem
-buffer_list_copy_data (GstBuffer ** buf, guint group, guint idx, gpointer data)
+static gboolean
+buffer_list_copy_data (GstBuffer ** buf, guint idx, gpointer data)
{
GstBuffer *dest = data;
+ guint num, i;
+
+ if (idx == 0)
+ gst_buffer_copy_into (dest, *buf, GST_BUFFER_COPY_METADATA, 0, -1);
- if (group == 0 && idx == 0)
- gst_buffer_copy_metadata (dest, *buf, GST_BUFFER_COPY_ALL);
+ num = gst_buffer_n_memory (*buf);
+ for (i = 0; i < num; ++i) {
+ GstMemory *mem;
- memcpy (GST_BUFFER_DATA (dest) + GST_BUFFER_SIZE (dest),
- GST_BUFFER_DATA (*buf), GST_BUFFER_SIZE (*buf));
- GST_BUFFER_SIZE (dest) += GST_BUFFER_SIZE (*buf);
+ mem = gst_buffer_get_memory (*buf, i);
+ gst_buffer_take_memory (dest, -1, mem);
+ }
- return GST_BUFFER_LIST_CONTINUE;
+ return TRUE;
}
/* Our assumption for now is that the buffers in a buffer list should always
/* copy all buffers in the list into one single buffer, so we can use
* the normal render function (FIXME: optimise to avoid the memcpy) */
- buf = gst_buffer_new_and_alloc (size);
- GST_BUFFER_SIZE (buf) = 0;
+ buf = gst_buffer_new ();
gst_buffer_list_foreach (list, buffer_list_copy_data, buf);
- g_assert (GST_BUFFER_SIZE (buf) == size);
+ g_assert (gst_buffer_get_size (buf) == size);
gst_multi_file_sink_render (sink, buf);
gst_buffer_unref (buf);
{
GstMultiFileSink *multifilesink;
gchar *filename;
- gboolean res = TRUE;
multifilesink = GST_MULTI_FILE_SINK (sink);
}
out:
- return res;
+ return GST_BASE_SINK_CLASS (parent_class)->event (sink, event);
+ /* ERRORS */
stdio_write_error:
- GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
- ("Error while writing to file."), (NULL));
- return FALSE;
+ {
+ GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
+ ("Error while writing to file."), (NULL));
+ gst_event_unref (event);
+ return FALSE;
+ }
}
static gboolean
const GValue * value, GParamSpec * pspec);
static void gst_multi_file_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstCaps *gst_multi_file_src_getcaps (GstBaseSrc * src);
+static GstCaps *gst_multi_file_src_getcaps (GstBaseSrc * src, GstCaps * filter);
static gboolean gst_multi_file_src_query (GstBaseSrc * src, GstQuery * query);
#define DEFAULT_LOCATION "%05d"
#define DEFAULT_INDEX 0
+#define gst_multi_file_src_parent_class parent_class
+G_DEFINE_TYPE (GstMultiFileSrc, gst_multi_file_src, GST_TYPE_PUSH_SRC);
-GST_BOILERPLATE (GstMultiFileSrc, gst_multi_file_src, GstPushSrc,
- GST_TYPE_PUSH_SRC);
-
-static void
-gst_multi_file_src_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- GST_DEBUG_CATEGORY_INIT (gst_multi_file_src_debug, "multifilesrc", 0,
- "multifilesrc element");
-
- gst_element_class_add_static_pad_template (gstelement_class,
- &gst_multi_file_src_pad_template);
- gst_element_class_set_details_simple (gstelement_class, "Multi-File Source",
- "Source/File",
- "Read a sequentially named set of files into buffers",
- "David Schleef <ds@schleef.org>");
-}
static void
gst_multi_file_src_class_init (GstMultiFileSrcClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
GST_LOG ("No large file support, sizeof (off_t) = %" G_GSIZE_FORMAT,
sizeof (off_t));
}
+
+ GST_DEBUG_CATEGORY_INIT (gst_multi_file_src_debug, "multifilesrc", 0,
+ "multifilesrc element");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_multi_file_src_pad_template));
+ gst_element_class_set_details_simple (gstelement_class, "Multi-File Source",
+ "Source/File",
+ "Read a sequentially named set of files into buffers",
+ "David Schleef <ds@schleef.org>");
}
static void
-gst_multi_file_src_init (GstMultiFileSrc * multifilesrc,
- GstMultiFileSrcClass * g_class)
+gst_multi_file_src_init (GstMultiFileSrc * multifilesrc)
{
multifilesrc->start_index = DEFAULT_INDEX;
multifilesrc->index = DEFAULT_INDEX;
}
static GstCaps *
-gst_multi_file_src_getcaps (GstBaseSrc * src)
+gst_multi_file_src_getcaps (GstBaseSrc * src, GstCaps * filter)
{
GstMultiFileSrc *multi_file_src = GST_MULTI_FILE_SRC (src);
GST_DEBUG_OBJECT (src, "returning %" GST_PTR_FORMAT, multi_file_src->caps);
if (multi_file_src->caps) {
- return gst_caps_ref (multi_file_src->caps);
+ if (filter)
+ return gst_caps_intersect_full (filter, multi_file_src->caps,
+ GST_CAPS_INTERSECT_FIRST);
+ else
+ return gst_caps_ref (multi_file_src->caps);
} else {
- return gst_caps_new_any ();
+ if (filter)
+ return gst_caps_ref (filter);
+ else
+ return gst_caps_new_any ();
}
}
if (error != NULL)
g_error_free (error);
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
} else {
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
} else {
goto handle_error;
}
buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = (unsigned char *) data;
- GST_BUFFER_MALLOCDATA (buf) = GST_BUFFER_DATA (buf);
- GST_BUFFER_SIZE (buf) = size;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
GST_BUFFER_OFFSET (buf) = multifilesrc->offset;
GST_BUFFER_OFFSET_END (buf) = multifilesrc->offset + size;
multifilesrc->offset += size;
- gst_buffer_set_caps (buf, multifilesrc->caps);
GST_DEBUG_OBJECT (multifilesrc, "read file \"%s\".", filename);
static gboolean gst_split_file_src_start (GstBaseSrc * basesrc);
static gboolean gst_split_file_src_stop (GstBaseSrc * basesrc);
static gboolean gst_split_file_src_can_seek (GstBaseSrc * basesrc);
-static gboolean gst_split_file_src_check_get_range (GstBaseSrc * basesrc);
static gboolean gst_split_file_src_get_size (GstBaseSrc * basesrc, guint64 * s);
static gboolean gst_split_file_src_unlock (GstBaseSrc * basesrc);
static GstFlowReturn gst_split_file_src_create (GstBaseSrc * basesrc,
GST_DEBUG_CATEGORY_STATIC (splitfilesrc_debug);
#define GST_CAT_DEFAULT splitfilesrc_debug
-GST_BOILERPLATE (GstSplitFileSrc, gst_split_file_src, GstBaseSrc,
- GST_TYPE_BASE_SRC);
-
-static void
-gst_split_file_src_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- GST_DEBUG_CATEGORY_INIT (splitfilesrc_debug, "splitfilesrc", 0,
- "splitfilesrc element");
-
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_split_file_src_pad_template));
-
- gst_element_class_set_details_simple (gstelement_class, "Split-File Source",
- "Source/File",
- "Read a sequentially named set of files as if it was one large file",
- "Tim-Philipp Müller <tim.muller@collabora.co.uk>");
-}
+G_DEFINE_TYPE (GstSplitFileSrc, gst_split_file_src, GST_TYPE_BASE_SRC);
#ifdef G_OS_WIN32
#define WIN32_BLURB " Location string must be in UTF-8 encoding (on Windows)."
gst_split_file_src_class_init (GstSplitFileSrcClass * klass)
{
GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
gobject_class->set_property = gst_split_file_src_set_property;
gstbasesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_split_file_src_unlock);
gstbasesrc_class->is_seekable =
GST_DEBUG_FUNCPTR (gst_split_file_src_can_seek);
- gstbasesrc_class->check_get_range =
- GST_DEBUG_FUNCPTR (gst_split_file_src_check_get_range);
+
+ GST_DEBUG_CATEGORY_INIT (splitfilesrc_debug, "splitfilesrc", 0,
+ "splitfilesrc element");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_split_file_src_pad_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "Split-File Source",
+ "Source/File",
+ "Read a sequentially named set of files as if it was one large file",
+ "Tim-Philipp Müller <tim.muller@collabora.co.uk>");
}
static void
-gst_split_file_src_init (GstSplitFileSrc * splitfilesrc,
- GstSplitFileSrcClass * g_class)
+gst_split_file_src_init (GstSplitFileSrc * splitfilesrc)
{
}
g_free (src->location);
src->location = NULL;
- G_OBJECT_CLASS (parent_class)->finalize (obj);
+ G_OBJECT_CLASS (gst_split_file_src_parent_class)->finalize (obj);
}
static gboolean
}
static gboolean
-gst_split_file_src_check_get_range (GstBaseSrc * basesrc)
-{
- return TRUE;
-}
-
-static gboolean
gst_split_file_src_unlock (GstBaseSrc * basesrc)
{
/* This is not actually that useful, since all normal file
GstBuffer *buf;
GError *err = NULL;
guint64 read_offset;
+ GstMapInfo map;
guint8 *data;
guint to_read;
cur_part = src->parts[src->cur_part];
if (offset < cur_part.start || offset > cur_part.stop) {
if (!gst_split_file_src_find_part_for_offset (src, offset, &src->cur_part))
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
cur_part = src->parts[src->cur_part];
}
"%" G_GUINT64_FORMAT ", %s)", src->cur_part, cur_part.start,
cur_part.stop, cur_part.path);
- buf = gst_buffer_new_and_alloc (size);
+ buf = gst_buffer_new_allocate (NULL, size, 0);
GST_BUFFER_OFFSET (buf) = offset;
- data = GST_BUFFER_DATA (buf);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = map.data;
cancel = src->cancellable;
if (read < to_read) {
if (src->cur_part == src->num_parts - 1) {
/* last file part, stop reading and truncate buffer */
- GST_BUFFER_SIZE (buf) = offset - GST_BUFFER_OFFSET (buf);
+ gst_buffer_set_size (buf, offset - GST_BUFFER_OFFSET (buf));
break;
} else {
goto file_part_changed;
GST_BUFFER_OFFSET_END (buf) = offset;
+ gst_buffer_unmap (buf, &map);
+
*buffer = buf;
- GST_LOG_OBJECT (src, "read %u bytes into buf %p", GST_BUFFER_SIZE (buf), buf);
+ GST_LOG_OBJECT (src, "read %" G_GSIZE_FORMAT " bytes into buf %p",
+ gst_buffer_get_size (buf), buf);
return GST_FLOW_OK;
/* ERRORS */
GST_DEBUG_OBJECT (src, "I/O operation cancelled from another thread");
g_error_free (err);
gst_buffer_unref (buf);
- return GST_FLOW_WRONG_STATE;
+ return GST_FLOW_FLUSHING;
}
}
};
static GstStaticPadTemplate multipart_demux_src_template_factory =
-GST_STATIC_PAD_TEMPLATE ("src_%d",
+GST_STATIC_PAD_TEMPLATE ("src_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
};
-static GstFlowReturn gst_multipart_demux_chain (GstPad * pad, GstBuffer * buf);
+static GstFlowReturn gst_multipart_demux_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buf);
static GstStateChangeReturn gst_multipart_demux_change_state (GstElement *
element, GstStateChange transition);
static void gst_multipart_demux_finalize (GObject * object);
-GST_BOILERPLATE (GstMultipartDemux, gst_multipart_demux, GstElement,
- GST_TYPE_ELEMENT);
-
-static void
-gst_multipart_demux_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &multipart_demux_sink_template_factory);
- gst_element_class_add_static_pad_template (element_class,
- &multipart_demux_src_template_factory);
- gst_element_class_set_details_simple (element_class, "Multipart demuxer",
- "Codec/Demuxer",
- "demux multipart streams",
- "Wim Taymans <wim.taymans@gmail.com>, Sjoerd Simons <sjoerd@luon.net>");
-}
+#define gst_multipart_demux_parent_class parent_class
+G_DEFINE_TYPE (GstMultipartDemux, gst_multipart_demux, GST_TYPE_ELEMENT);
static void
gst_multipart_demux_class_init (GstMultipartDemuxClass * klass)
}
gstelement_class->change_state = gst_multipart_demux_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&multipart_demux_sink_template_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&multipart_demux_src_template_factory));
+ gst_element_class_set_details_simple (gstelement_class, "Multipart demuxer",
+ "Codec/Demuxer",
+ "demux multipart streams",
+ "Wim Taymans <wim.taymans@gmail.com>, Sjoerd Simons <sjoerd@luon.net>");
}
static void
-gst_multipart_demux_init (GstMultipartDemux * multipart,
- GstMultipartDemuxClass * g_class)
+gst_multipart_demux_init (GstMultipartDemux * multipart)
{
/* create the sink pad */
multipart->sinkpad =
GST_DEBUG_OBJECT (demux, "creating pad with mime: %s", mime);
- name = g_strdup_printf ("src_%d", demux->numpads);
+ name = g_strdup_printf ("src_%u", demux->numpads);
pad =
gst_pad_new_from_static_template (&multipart_demux_src_template_factory,
name);
guint8 *end, *next;
datalen = gst_adapter_available (multipart->adapter);
- data = gst_adapter_peek (multipart->adapter, datalen);
+ data = gst_adapter_map (multipart->adapter, datalen);
dataend = data + datalen;
/* Skip leading whitespace, pos endposition should at least leave space for
for (pos = (guint8 *) data; pos < dataend - 4 && g_ascii_isspace (*pos);
pos++);
- if (pos >= dataend - 4) {
- return MULTIPART_NEED_MORE_DATA;
- }
+ if (pos >= dataend - 4)
+ goto need_more_data;
if (G_UNLIKELY (pos[0] != '-' || pos[1] != '-')) {
GST_DEBUG_OBJECT (multipart, "No boundary available");
/* First the boundary */
if (!get_line_end (pos, dataend, &end, &next))
- return MULTIPART_NEED_MORE_DATA;
+ goto need_more_data;
/* Ignore the leading -- */
boundary_len = end - pos - 2;
* invalid */
if (G_UNLIKELY (boundary_len == multipart->boundary_len + 2 &&
!strncmp (boundary, multipart->boundary, multipart->boundary_len) &&
- !strncmp (boundary + multipart->boundary_len, "--", 2))) {
- return MULTIPART_DATA_EOS;
- }
+ !strncmp (boundary + multipart->boundary_len, "--", 2)))
+ goto eos;
+
GST_DEBUG_OBJECT (multipart,
"Boundary length doesn't match detected boundary (%d <> %d",
boundary_len, multipart->boundary_len);
goto wrong_header;
}
-
pos = next;
while (get_line_end (pos, dataend, &end, &next)) {
guint len = end - pos;
GST_DEBUG_OBJECT (multipart,
"Parsed the header - boundary: %s, mime-type: %s, content-length: %d",
multipart->boundary, multipart->mime_type, multipart->content_length);
+ gst_adapter_unmap (multipart->adapter);
return next - data;
}
}
pos = next;
}
+
+need_more_data:
GST_DEBUG_OBJECT (multipart, "Need more data for the header");
+ gst_adapter_unmap (multipart->adapter);
+
return MULTIPART_NEED_MORE_DATA;
wrong_header:
{
GST_ELEMENT_ERROR (multipart, STREAM, DEMUX, (NULL),
("Boundary not found in the multipart header"));
+ gst_adapter_unmap (multipart->adapter);
return MULTIPART_DATA_ERROR;
}
+eos:
+ {
+ GST_DEBUG_OBJECT (multipart, "we are EOS");
+ gst_adapter_unmap (multipart->adapter);
+ return MULTIPART_DATA_EOS;
+ }
}
static gint
len = multipart->content_length;
if (gst_adapter_available (multipart->adapter) >= len + 2) {
*datalen = len;
- data = gst_adapter_peek (multipart->adapter, len + 1);
+ data = gst_adapter_map (multipart->adapter, len + 1);
/* If data[len] contains \r then assume a newline is \r\n */
if (data[len] == '\r')
len += 2;
else if (data[len] == '\n')
len += 1;
- /* Don't check if boundary is actually there, but let the header parsing
+
+ gst_adapter_unmap (multipart->adapter);
+ /* Don't check if boundary is actually there, but let the header parsing
* bail out if it isn't */
return len;
} else {
len = gst_adapter_available (multipart->adapter);
if (len == 0)
return MULTIPART_NEED_MORE_DATA;
- data = gst_adapter_peek (multipart->adapter, len);
+ data = gst_adapter_map (multipart->adapter, len);
dataend = data + len;
for (pos = data + multipart->scanpos;
len -= 1;
*datalen = len;
+ gst_adapter_unmap (multipart->adapter);
multipart->scanpos = 0;
return pos - data;
}
}
+ gst_adapter_unmap (multipart->adapter);
multipart->scanpos = pos - data;
return MULTIPART_NEED_MORE_DATA;
}
static GstFlowReturn
-gst_multipart_demux_chain (GstPad * pad, GstBuffer * buf)
+gst_multipart_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstMultipartDemux *multipart;
GstAdapter *adapter;
gint size = 1;
GstFlowReturn res;
- multipart = GST_MULTIPART_DEMUX (gst_pad_get_parent (pad));
+ multipart = GST_MULTIPART_DEMUX (parent);
adapter = multipart->adapter;
res = GST_FLOW_OK;
outbuf = gst_adapter_take_buffer (adapter, datalen);
gst_adapter_flush (adapter, size - datalen);
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (srcpad->pad));
if (created) {
GstTagList *tags;
+ GstSegment segment;
+
+ gst_segment_init (&segment, GST_FORMAT_TIME);
/* Push new segment, first buffer has 0 timestamp */
- gst_pad_push_event (srcpad->pad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
+ gst_pad_push_event (srcpad->pad, gst_event_new_segment (&segment));
- tags =
- gst_tag_list_new_full (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
+ tags = gst_tag_list_new (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));
GST_BUFFER_TIMESTAMP (outbuf) = 0;
GST_DEBUG_OBJECT (multipart,
"pushing buffer with timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
- GST_DEBUG_OBJECT (multipart, "buffer has caps %" GST_PTR_FORMAT,
- GST_BUFFER_CAPS (outbuf));
res = gst_pad_push (srcpad->pad, outbuf);
res = gst_multipart_combine_flows (multipart, srcpad, res);
if (res != GST_FLOW_OK)
}
nodata:
- gst_object_unref (multipart);
-
if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
return GST_FLOW_ERROR;
if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
return res;
}
GstPad *sinkpad;
GSList *srcpads;
- gint numpads;
+ guint numpads;
GstAdapter *adapter;
GST_STATIC_CAPS ("multipart/x-mixed-replace")
);
-static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%d",
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS_ANY /* we can take anything, really */
{NULL, NULL}
};
-static void gst_multipart_mux_base_init (gpointer g_class);
-static void gst_multipart_mux_class_init (GstMultipartMuxClass * klass);
-static void gst_multipart_mux_init (GstMultipartMux * multipart_mux);
-
static void gst_multipart_mux_finalize (GObject * object);
static gboolean gst_multipart_mux_handle_src_event (GstPad * pad,
- GstEvent * event);
+ GstObject * parent, GstEvent * event);
static GstPad *gst_multipart_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static GstStateChangeReturn gst_multipart_mux_change_state (GstElement *
element, GstStateChange transition);
static void gst_multipart_mux_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstElementClass *parent_class = NULL;
-
-GType
-gst_multipart_mux_get_type (void)
-{
- static GType multipart_mux_type = 0;
-
- if (!multipart_mux_type) {
- static const GTypeInfo multipart_mux_info = {
- sizeof (GstMultipartMuxClass),
- gst_multipart_mux_base_init,
- NULL,
- (GClassInitFunc) gst_multipart_mux_class_init,
- NULL,
- NULL,
- sizeof (GstMultipartMux),
- 0,
- (GInstanceInitFunc) gst_multipart_mux_init,
- };
-
- multipart_mux_type =
- g_type_register_static (GST_TYPE_ELEMENT, "GstMultipartMux",
- &multipart_mux_info, 0);
- }
- return multipart_mux_type;
-}
-
-static void
-gst_multipart_mux_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
-
- gst_element_class_set_details_simple (element_class, "Multipart muxer",
- "Codec/Muxer", "mux multipart streams", "Wim Taymans <wim@fluendo.com>");
-}
+#define gst_multipart_mux_parent_class parent_class
+G_DEFINE_TYPE (GstMultipartMux, gst_multipart_mux, GST_TYPE_ELEMENT);
static void
gst_multipart_mux_class_init (GstMultipartMuxClass * klass)
gstelement_class->request_new_pad = gst_multipart_mux_request_new_pad;
gstelement_class->change_state = gst_multipart_mux_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "Multipart muxer",
+ "Codec/Muxer", "mux multipart streams", "Wim Taymans <wim@fluendo.com>");
+
/* populate mime types */
klass->mimetypes = g_hash_table_new (g_str_hash, g_str_equal);
for (i = 0; mimetypes[i].key; i++) {
static GstPad *
gst_multipart_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * req_name)
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
GstMultipartMux *multipart_mux;
GstPad *newpad;
GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
gchar *name;
- if (templ != gst_element_class_get_pad_template (klass, "sink_%d"))
+ if (templ != gst_element_class_get_pad_template (klass, "sink_%u"))
goto wrong_template;
multipart_mux = GST_MULTIPART_MUX (element);
/* create new pad with the name */
- name = g_strdup_printf ("sink_%02d", multipart_mux->numpads);
+ name = g_strdup_printf ("sink_%u", multipart_mux->numpads);
newpad = gst_pad_new_from_template (templ, name);
g_free (name);
sizeof (GstMultipartPadData));
/* save a pointer to our data in the pad */
+ multipartpad->pad = newpad;
gst_pad_set_element_private (newpad, multipartpad);
multipart_mux->numpads++;
}
/* handle events */
static gboolean
-gst_multipart_mux_handle_src_event (GstPad * pad, GstEvent * event)
+gst_multipart_mux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
- GstMultipartMux *multipart_mux;
GstEventType type;
- multipart_mux = GST_MULTIPART_MUX (gst_pad_get_parent (pad));
-
type = event ? GST_EVENT_TYPE (event) : GST_EVENT_UNKNOWN;
switch (type) {
break;
}
- gst_object_unref (multipart_mux);
-
- return gst_pad_event_default (pad, event);
+ return gst_pad_event_default (pad, parent, event);
}
static const gchar *
GstBuffer *footerbuf = NULL;
GstBuffer *databuf = NULL;
GstStructure *structure = NULL;
+ GstCaps *caps;
const gchar *mime;
GST_DEBUG_OBJECT (mux, "all pads are collected");
/* see if we need to push a segment */
if (mux->need_segment) {
- GstEvent *event;
GstClockTime time;
+ GstSegment segment;
if (best->timestamp != -1)
time = best->timestamp;
/* for the segment, we take the first timestamp we see, we don't know the
* length and the position is 0 */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
- time, -1, 0);
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ segment.start = time;
- gst_pad_push_event (mux->srcpad, event);
+ gst_pad_push_event (mux->srcpad, gst_event_new_segment (&segment));
mux->need_segment = FALSE;
}
- structure = gst_caps_get_structure (GST_BUFFER_CAPS (best->buffer), 0);
- if (!structure)
+ caps = gst_pad_get_current_caps (best->pad);
+ if (caps == NULL)
goto no_caps;
+ structure = gst_caps_get_structure (caps, 0);
+ if (!structure) {
+ gst_caps_unref (caps);
+ goto no_caps;
+ }
+
/* get the mime type for the structure */
mime = gst_multipart_mux_get_mime (mux, structure);
+ gst_caps_unref (caps);
header = g_strdup_printf ("--%s\r\nContent-Type: %s\r\n"
- "Content-Length: %u\r\n\r\n",
- mux->boundary, mime, GST_BUFFER_SIZE (best->buffer));
+ "Content-Length: %" G_GSIZE_FORMAT "\r\n\r\n",
+ mux->boundary, mime, gst_buffer_get_size (best->buffer));
headerlen = strlen (header);
- ret = gst_pad_alloc_buffer_and_set_caps (mux->srcpad, GST_BUFFER_OFFSET_NONE,
- headerlen, GST_PAD_CAPS (mux->srcpad), &headerbuf);
- if (ret != GST_FLOW_OK)
- goto alloc_failed;
-
- memcpy (GST_BUFFER_DATA (headerbuf), header, headerlen);
+ headerbuf = gst_buffer_new_allocate (NULL, headerlen, 1);
+ gst_buffer_fill (headerbuf, 0, header, headerlen);
g_free (header);
/* the header has the same timestamp as the data buffer (which we will push
/* take best->buffer, we don't need to unref it later as we will push it
* now. */
- databuf = gst_buffer_make_metadata_writable (best->buffer);
+ databuf = gst_buffer_make_writable (best->buffer);
best->buffer = NULL;
- gst_buffer_set_caps (databuf, GST_PAD_CAPS (mux->srcpad));
/* we need to updated the timestamp to match the running_time */
GST_BUFFER_TIMESTAMP (databuf) = best->timestamp;
GST_BUFFER_OFFSET (databuf) = mux->offset;
- mux->offset += GST_BUFFER_SIZE (databuf);
+ mux->offset += gst_buffer_get_size (databuf);
GST_BUFFER_OFFSET_END (databuf) = mux->offset;
GST_BUFFER_FLAG_SET (databuf, GST_BUFFER_FLAG_DELTA_UNIT);
- GST_DEBUG_OBJECT (mux, "pushing %u bytes data buffer",
- GST_BUFFER_SIZE (databuf));
+ GST_DEBUG_OBJECT (mux, "pushing %" G_GSIZE_FORMAT " bytes data buffer",
+ gst_buffer_get_size (databuf));
ret = gst_pad_push (mux->srcpad, databuf);
if (ret != GST_FLOW_OK)
/* push always takes ownership of the buffer, even after an error, so we
* don't need to unref headerbuf here. */
goto beach;
- ret = gst_pad_alloc_buffer_and_set_caps (mux->srcpad, GST_BUFFER_OFFSET_NONE,
- 2, GST_PAD_CAPS (mux->srcpad), &footerbuf);
- if (ret != GST_FLOW_OK)
- goto alloc_failed;
-
- memcpy (GST_BUFFER_DATA (footerbuf), "\r\n", 2);
+ footerbuf = gst_buffer_new_allocate (NULL, 2, 1);
+ gst_buffer_fill (footerbuf, 0, "\r\n", 2);
/* the footer has the same timestamp as the data buffer and has a
* duration of 0 */
{
GST_DEBUG_OBJECT (mux, "Pushing EOS");
gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto beach;
}
nego_error:
ret = GST_FLOW_NOT_NEGOTIATED;
goto beach;
}
-alloc_failed:
- {
- GST_WARNING_OBJECT (mux,
- "failed allocating a %" G_GSIZE_FORMAT " bytes buffer", headerlen);
- g_free (header);
- goto beach;
- }
}
static void
GstBuffer *buffer; /* the queued buffer for this pad */
GstClockTime timestamp; /* its timestamp, converted to running_time so that we can
correctly sort over multiple segments. */
+ GstPad *pad;
}
GstMultipartPadData;
libgstreplaygain_la_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstreplaygain_la_LIBADD = \
- $(GST_PLUGINS_BASE_LIBS) -lgstpbutils-0.10 \
+ $(GST_PLUGINS_BASE_LIBS) -lgstpbutils-$(GST_MAJORMINOR) -lgstaudio-$(GST_MAJORMINOR)\
$(GST_BASE_LIBS) $(GST_LIBS) $(LIBM)
libgstreplaygain_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstreplaygain_la_LIBTOOLFLAGS = --tag=disable-static
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
#include "gstrganalysis.h"
#include "replaygain.h"
/* The ReplayGain algorithm is intended for use with mono and stereo
* audio. The used implementation has filter coefficients for the
* "usual" sample rates in the 8000 to 48000 Hz range. */
-#define REPLAY_GAIN_CAPS \
- "channels = (int) { 1, 2 }, " \
+#define REPLAY_GAIN_CAPS "audio/x-raw," \
+ "format = (string) { "GST_AUDIO_NE(F32)","GST_AUDIO_NE(S16)" }, " \
+ "layout = (string) interleaved, " \
+ "channels = (int) 1, " \
+ "rate = (int) { 8000, 11025, 12000, 16000, 22050, 24000, 32000, " \
+ "44100, 48000 }; " \
+ "audio/x-raw," \
+ "format = (string) { "GST_AUDIO_NE(F32)","GST_AUDIO_NE(S16)" }, " \
+ "layout = (string) interleaved, " \
+ "channels = (int) 2, " \
+ "channel-mask = (bitmask) 0x3, " \
"rate = (int) { 8000, 11025, 12000, 16000, 22050, 24000, 32000, " \
"44100, 48000 }"
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS ("audio/x-raw-float, "
- "width = (int) 32, " "endianness = (int) BYTE_ORDER, "
- REPLAY_GAIN_CAPS "; "
- "audio/x-raw-int, "
- "width = (int) 16, " "depth = (int) [ 1, 16 ], "
- "signed = (boolean) true, " "endianness = (int) BYTE_ORDER, "
- REPLAY_GAIN_CAPS));
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (REPLAY_GAIN_CAPS));
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS ("audio/x-raw-float, "
- "width = (int) 32, " "endianness = (int) BYTE_ORDER, "
- REPLAY_GAIN_CAPS "; "
- "audio/x-raw-int, "
- "width = (int) 16, " "depth = (int) [ 1, 16 ], "
- "signed = (boolean) true, " "endianness = (int) BYTE_ORDER, "
- REPLAY_GAIN_CAPS));
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (REPLAY_GAIN_CAPS));
-GST_BOILERPLATE (GstRgAnalysis, gst_rg_analysis, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
+#define gst_rg_analysis_parent_class parent_class
+G_DEFINE_TYPE (GstRgAnalysis, gst_rg_analysis, GST_TYPE_BASE_TRANSFORM);
static void gst_rg_analysis_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GstCaps * incaps, GstCaps * outcaps);
static GstFlowReturn gst_rg_analysis_transform_ip (GstBaseTransform * base,
GstBuffer * buf);
-static gboolean gst_rg_analysis_event (GstBaseTransform * base,
+static gboolean gst_rg_analysis_sink_event (GstBaseTransform * base,
GstEvent * event);
static gboolean gst_rg_analysis_stop (GstBaseTransform * base);
GstTagList ** tag_list);
static void
-gst_rg_analysis_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
- gst_element_class_set_details_simple (element_class, "ReplayGain analysis",
- "Filter/Analyzer/Audio",
- "Perform the ReplayGain analysis",
- "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
-
- GST_DEBUG_CATEGORY_INIT (gst_rg_analysis_debug, "rganalysis", 0,
- "ReplayGain analysis element");
-}
-
-static void
gst_rg_analysis_class_init (GstRgAnalysisClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_rg_analysis_set_property;
gobject_class->get_property = gst_rg_analysis_get_property;
trans_class->start = GST_DEBUG_FUNCPTR (gst_rg_analysis_start);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_rg_analysis_set_caps);
trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_rg_analysis_transform_ip);
- trans_class->event = GST_DEBUG_FUNCPTR (gst_rg_analysis_event);
+ trans_class->sink_event = GST_DEBUG_FUNCPTR (gst_rg_analysis_sink_event);
trans_class->stop = GST_DEBUG_FUNCPTR (gst_rg_analysis_stop);
trans_class->passthrough_on_same_caps = TRUE;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
+ gst_element_class_set_details_simple (element_class, "ReplayGain analysis",
+ "Filter/Analyzer/Audio",
+ "Perform the ReplayGain analysis",
+ "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
+
+ GST_DEBUG_CATEGORY_INIT (gst_rg_analysis_debug, "rganalysis", 0,
+ "ReplayGain analysis element");
}
static void
-gst_rg_analysis_init (GstRgAnalysis * filter, GstRgAnalysisClass * gclass)
+gst_rg_analysis_init (GstRgAnalysis * filter)
{
GstBaseTransform *base = GST_BASE_TRANSFORM (filter);
GstCaps * out_caps)
{
GstRgAnalysis *filter = GST_RG_ANALYSIS (base);
- GstStructure *structure;
- const gchar *name;
- gint n_channels, sample_rate, sample_bit_size, sample_size;
+ GstAudioInfo info;
+ gint rate, channels;
g_return_val_if_fail (filter->ctx != NULL, FALSE);
"set_caps in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT,
in_caps, out_caps);
- structure = gst_caps_get_structure (in_caps, 0);
- name = gst_structure_get_name (structure);
-
- if (!gst_structure_get_int (structure, "width", &sample_bit_size)
- || !gst_structure_get_int (structure, "channels", &n_channels)
- || !gst_structure_get_int (structure, "rate", &sample_rate))
+ if (!gst_audio_info_from_caps (&info, in_caps))
goto invalid_format;
- if (!rg_analysis_set_sample_rate (filter->ctx, sample_rate))
- goto invalid_format;
+ rate = GST_AUDIO_INFO_RATE (&info);
- if (sample_bit_size % 8 != 0)
+ if (!rg_analysis_set_sample_rate (filter->ctx, rate))
goto invalid_format;
- sample_size = sample_bit_size / 8;
-
- if (g_str_equal (name, "audio/x-raw-float")) {
- if (sample_size != sizeof (gfloat))
- goto invalid_format;
-
- /* The depth is not variable for float formats of course. It just
- * makes the transform function nice and simple if the
- * rg_analysis_analyze_* functions have a common signature. */
- filter->depth = sizeof (gfloat) * 8;
+ channels = GST_AUDIO_INFO_CHANNELS (&info);
- if (n_channels == 1)
- filter->analyze = rg_analysis_analyze_mono_float;
- else if (n_channels == 2)
- filter->analyze = rg_analysis_analyze_stereo_float;
- else
- goto invalid_format;
+ if (channels < 1 || channels > 2)
+ goto invalid_format;
- } else if (g_str_equal (name, "audio/x-raw-int")) {
+ switch (GST_AUDIO_INFO_FORMAT (&info)) {
+ case GST_AUDIO_FORMAT_F32:
+ /* The depth is not variable for float formats of course. It just
+ * makes the transform function nice and simple if the
+ * rg_analysis_analyze_* functions have a common signature. */
+ filter->depth = sizeof (gfloat) * 8;
- if (sample_size != sizeof (gint16))
- goto invalid_format;
+ if (channels == 1)
+ filter->analyze = rg_analysis_analyze_mono_float;
+ else
+ filter->analyze = rg_analysis_analyze_stereo_float;
- if (!gst_structure_get_int (structure, "depth", &filter->depth))
- goto invalid_format;
- if (filter->depth < 1 || filter->depth > 16)
- goto invalid_format;
+ break;
+ case GST_AUDIO_FORMAT_S16:
+ filter->depth = sizeof (gint16) * 8;
- if (n_channels == 1)
- filter->analyze = rg_analysis_analyze_mono_int16;
- else if (n_channels == 2)
- filter->analyze = rg_analysis_analyze_stereo_int16;
- else
+ if (channels == 1)
+ filter->analyze = rg_analysis_analyze_mono_int16;
+ else
+ filter->analyze = rg_analysis_analyze_stereo_int16;
+ break;
+ default:
goto invalid_format;
-
- } else {
-
- goto invalid_format;
}
return TRUE;
gst_rg_analysis_transform_ip (GstBaseTransform * base, GstBuffer * buf)
{
GstRgAnalysis *filter = GST_RG_ANALYSIS (base);
+ GstMapInfo map;
- g_return_val_if_fail (filter->ctx != NULL, GST_FLOW_WRONG_STATE);
+ g_return_val_if_fail (filter->ctx != NULL, GST_FLOW_FLUSHING);
g_return_val_if_fail (filter->analyze != NULL, GST_FLOW_NOT_NEGOTIATED);
if (filter->skip)
return GST_FLOW_OK;
- GST_LOG_OBJECT (filter, "processing buffer of size %u",
- GST_BUFFER_SIZE (buf));
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ GST_LOG_OBJECT (filter, "processing buffer of size %" G_GSIZE_FORMAT,
+ map.size);
rg_analysis_start_buffer (filter->ctx, GST_BUFFER_TIMESTAMP (buf));
- filter->analyze (filter->ctx, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf),
- filter->depth);
+ filter->analyze (filter->ctx, map.data, map.size, filter->depth);
+
+ gst_buffer_unmap (buf, &map);
return GST_FLOW_OK;
}
static gboolean
-gst_rg_analysis_event (GstBaseTransform * base, GstEvent * event)
+gst_rg_analysis_sink_event (GstBaseTransform * base, GstEvent * event)
{
GstRgAnalysis *filter = GST_RG_ANALYSIS (base);
break;
}
- return GST_BASE_TRANSFORM_CLASS (parent_class)->event (base, event);
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (base, event);
}
static gboolean
gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND,
GST_TAG_REFERENCE_LEVEL, filter->reference_level, NULL);
/* This steals our reference to the list: */
- gst_element_found_tags_for_pad (GST_ELEMENT (filter),
- GST_BASE_TRANSFORM_SRC_PAD (GST_BASE_TRANSFORM (filter)), tag_list);
+ gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (GST_BASE_TRANSFORM
+ (filter)), gst_event_new_tag (tag_list));
}
}
if (track_success) {
if (*tag_list == NULL)
- *tag_list = gst_tag_list_new ();
+ *tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (*tag_list, GST_TAG_MERGE_APPEND,
GST_TAG_TRACK_PEAK, track_peak, GST_TAG_TRACK_GAIN, track_gain, NULL);
}
if (album_success) {
if (*tag_list == NULL)
- *tag_list = gst_tag_list_new ();
+ *tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (*tag_list, GST_TAG_MERGE_APPEND,
GST_TAG_ALBUM_PEAK, album_peak, GST_TAG_ALBUM_GAIN, album_gain, NULL);
}
#include <gst/gst.h>
#include <math.h>
+#include <gst/audio/audio.h>
#include "gstrglimiter.h"
};
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS ("audio/x-raw-float, "
- "width = (int) 32, channels = (int) [1, MAX], "
- "rate = (int) [1, MAX], endianness = (int) BYTE_ORDER"));
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (F32) ", "
+ "layout = (string) { interleaved, non-interleaved }, "
+ "channels = (int) [1, MAX], " "rate = (int) [1, MAX]"));
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS ("audio/x-raw-float, "
- "width = (int) 32, channels = (int) [1, MAX], "
- "rate = (int) [1, MAX], endianness = (int) BYTE_ORDER"));
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (F32) ", "
+ "layout = (string) { interleaved, non-interleaved}, "
+ "channels = (int) [1, MAX], " "rate = (int) [1, MAX]"));
-GST_BOILERPLATE (GstRgLimiter, gst_rg_limiter, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
+#define gst_rg_limiter_parent_class parent_class
+G_DEFINE_TYPE (GstRgLimiter, gst_rg_limiter, GST_TYPE_BASE_TRANSFORM);
static void gst_rg_limiter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GstBuffer * buf);
static void
-gst_rg_limiter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = g_class;
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
- gst_element_class_set_details_simple (element_class, "ReplayGain limiter",
- "Filter/Effect/Audio",
- "Apply signal compression to raw audio data",
- "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
-
- GST_DEBUG_CATEGORY_INIT (gst_rg_limiter_debug, "rglimiter", 0,
- "ReplayGain limiter element");
-}
-
-static void
gst_rg_limiter_class_init (GstRgLimiterClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
gobject_class->set_property = gst_rg_limiter_set_property;
gobject_class->get_property = gst_rg_limiter_get_property;
trans_class = GST_BASE_TRANSFORM_CLASS (klass);
trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_rg_limiter_transform_ip);
trans_class->passthrough_on_same_caps = FALSE;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
+ gst_element_class_set_details_simple (element_class, "ReplayGain limiter",
+ "Filter/Effect/Audio",
+ "Apply signal compression to raw audio data",
+ "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
+
+ GST_DEBUG_CATEGORY_INIT (gst_rg_limiter_debug, "rglimiter", 0,
+ "ReplayGain limiter element");
}
static void
-gst_rg_limiter_init (GstRgLimiter * filter, GstRgLimiterClass * gclass)
+gst_rg_limiter_init (GstRgLimiter * filter)
{
GstBaseTransform *base = GST_BASE_TRANSFORM (filter);
{
GstRgLimiter *filter = GST_RG_LIMITER (base);
gfloat *input;
+ GstMapInfo map;
guint count;
guint i;
if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP))
return GST_FLOW_OK;
- input = (gfloat *) GST_BUFFER_DATA (buf);
- count = GST_BUFFER_SIZE (buf) / sizeof (gfloat);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ input = (gfloat *) map.data;
+ count = gst_buffer_get_size (buf) / sizeof (gfloat);
for (i = count; i--;) {
if (*input > THRES)
input++;
}
+ gst_buffer_unmap (buf, &map);
+
return GST_FLOW_OK;
}
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>
+#include <gst/audio/audio.h>
#include <math.h>
#include "gstrgvolume.h"
/* Same template caps as GstVolume, for I don't like having just ANY caps. */
+#define FORMAT "{ "GST_AUDIO_NE(F32)","GST_AUDIO_NE(S16)" }"
+
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS ("audio/x-raw-float, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 32; "
- "audio/x-raw-int, "
- "channels = (int) [ 1, MAX ], "
- "rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE"));
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMAT ", "
+ "layout = (string) { interleaved, non-interleaved }, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]"));
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS ("audio/x-raw-float, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 32; "
- "audio/x-raw-int, "
- "channels = (int) [ 1, MAX ], "
- "rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE"));
-
-GST_BOILERPLATE (GstRgVolume, gst_rg_volume, GstBin, GST_TYPE_BIN);
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMAT ", "
+ "layout = (string) { interleaved, non-interleaved }, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]"));
+
+#define gst_rg_volume_parent_class parent_class
+G_DEFINE_TYPE (GstRgVolume, gst_rg_volume, GST_TYPE_BIN);
static void gst_rg_volume_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_rg_volume_change_state (GstElement * element,
GstStateChange transition);
-static gboolean gst_rg_volume_sink_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rg_volume_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static GstEvent *gst_rg_volume_tag_event (GstRgVolume * self, GstEvent * event);
static void gst_rg_volume_reset (GstRgVolume * self);
gdouble * target_gain, gdouble * result_gain);
static void
-gst_rg_volume_base_init (gpointer g_class)
-{
- GstElementClass *element_class = g_class;
-
- gst_element_class_add_static_pad_template (element_class, &src_template);
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_set_details_simple (element_class, "ReplayGain volume",
- "Filter/Effect/Audio",
- "Apply ReplayGain volume adjustment",
- "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
-
- GST_DEBUG_CATEGORY_INIT (gst_rg_volume_debug, "rgvolume", 0,
- "ReplayGain volume element");
-}
-
-static void
gst_rg_volume_class_init (GstRgVolumeClass * klass)
{
GObjectClass *gobject_class;
* This element internally uses a volume element, which also supports
* operating on integer audio formats. These formats do not allow exceeding
* digital full scale. If extra headroom is used, make sure that the raw
- * audio data format is floating point (audio/x-raw-float). Otherwise,
+ * audio data format is floating point (F32). Otherwise,
* clipping distortion might be introduced as part of the volume adjustment
* itself.
*/
* mess with our internals. */
bin_class->add_element = NULL;
bin_class->remove_element = NULL;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_set_details_simple (element_class, "ReplayGain volume",
+ "Filter/Effect/Audio",
+ "Apply ReplayGain volume adjustment",
+ "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
+
+ GST_DEBUG_CATEGORY_INIT (gst_rg_volume_debug, "rgvolume", 0,
+ "ReplayGain volume element");
}
static void
-gst_rg_volume_init (GstRgVolume * self, GstRgVolumeClass * gclass)
+gst_rg_volume_init (GstRgVolume * self)
{
GObjectClass *volume_class;
GstPad *volume_pad, *ghost_pad;
/* Event function for the ghost sink pad. */
static gboolean
-gst_rg_volume_sink_event (GstPad * pad, GstEvent * event)
+gst_rg_volume_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstRgVolume *self;
GstPad *volume_sink_pad;
GstEvent *send_event = event;
gboolean res;
- self = GST_RG_VOLUME (gst_pad_get_parent_element (pad));
+ self = GST_RG_VOLUME (parent);
volume_sink_pad = gst_ghost_pad_get_target (GST_GHOST_PAD (pad));
switch (GST_EVENT_TYPE (event)) {
res = TRUE;
gst_object_unref (volume_sink_pad);
- gst_object_unref (self);
+
return res;
}
fnv1hash.c \
gstrtp.c \
gstrtpchannels.c \
- gstrtpdepay.c \
gstrtpac3depay.c \
gstrtpac3pay.c \
gstrtpbvdepay.c \
gstrtpvrawdepay.c \
gstrtpvrawpay.c
-
-if HAVE_WINSOCK2_H
-WINSOCK2_LIBS = -lws2_32
-else
-WINSOCK2_LIBS =
-endif
-
libgstrtp_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstrtp_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
-lgstaudio-@GST_MAJORMINOR@ \
+ -lgstvideo-@GST_MAJORMINOR@ \
-lgsttag-@GST_MAJORMINOR@ \
-lgstrtp-@GST_MAJORMINOR@ \
-lgstpbutils-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) $(GST_LIBS) \
- $(WINSOCK2_LIBS) $(LIBM)
+ $(LIBM)
libgstrtp_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstrtp_la_LIBTOOLFLAGS = --tag=disable-static
gstrtpmp4gpay.h \
gstrtpmp4adepay.h \
gstrtpmp4apay.h \
- gstrtpdepay.h \
gstasteriskh263.h \
gstrtpqcelpdepay.h \
gstrtpqdmdepay.h \
The ssrc value currently in use. (default = the SSRC of the first RTP
packet)
- clock-base: (uint) [0 - MAXINT]
+ timestamp-offset: (uint) [0 - MAXINT]
The RTP time representing time npt-start. (default = rtptime of first RTP
packet).
- seqnum-base: (uint) [0 - MAXINT]
+ seqnum-offset: (uint) [0 - MAXINT]
The RTP sequence number representing the first rtp packet. When this
parameter is given, all sequence numbers below this seqnum should be
ignored. (default = seqnum of first RTP packet).
#define __GST_FNV1_HASH_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
#include <gst/rtp/gstrtpbuffer.h>
#include "gstasteriskh263.h"
-/* Cygwin has both netinet/in.h and winsock2.h, but
- * only one can be included, so prefer the unix one */
-#ifdef HAVE_NETINET_IN_H
-# include <netinet/in.h>
-#else
-#ifdef HAVE_WINSOCK2_H
-# include <winsock2.h>
-#endif
-#endif
-
#define GST_ASTERISKH263_HEADER_LEN 6
typedef struct _GstAsteriskH263Header
guint16 length; /* Length */
} GstAsteriskH263Header;
-#define GST_ASTERISKH263_HEADER_TIMESTAMP(buf) (((GstAsteriskH263Header *)(GST_BUFFER_DATA (buf)))->timestamp)
-#define GST_ASTERISKH263_HEADER_LENGTH(buf) (((GstAsteriskH263Header *)(GST_BUFFER_DATA (buf)))->length)
+#define GST_ASTERISKH263_HEADER_TIMESTAMP(data) (((GstAsteriskH263Header *)(data))->timestamp)
+#define GST_ASTERISKH263_HEADER_LENGTH(data) (((GstAsteriskH263Header *)(data))->length)
static GstStaticPadTemplate gst_asteriskh263_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
static void gst_asteriskh263_finalize (GObject * object);
-static GstFlowReturn gst_asteriskh263_chain (GstPad * pad, GstBuffer * buffer);
+static GstFlowReturn gst_asteriskh263_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
static GstStateChangeReturn gst_asteriskh263_change_state (GstElement *
element, GstStateChange transition);
-GST_BOILERPLATE (GstAsteriskh263, gst_asteriskh263, GstElement,
- GST_TYPE_ELEMENT);
-
-static void
-gst_asteriskh263_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_asteriskh263_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_asteriskh263_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP Asterisk H263 depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts H263 video from RTP and encodes in Asterisk H263 format",
- "Neil Stratford <neils@vipadia.com>");
-}
+#define gst_asteriskh263_parent_class parent_class
+G_DEFINE_TYPE (GstAsteriskh263, gst_asteriskh263, GST_TYPE_ELEMENT);
static void
gst_asteriskh263_class_init (GstAsteriskh263Class * klass)
gobject_class->finalize = gst_asteriskh263_finalize;
gstelement_class->change_state = gst_asteriskh263_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_asteriskh263_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_asteriskh263_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Asterisk H263 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H263 video from RTP and encodes in Asterisk H263 format",
+ "Neil Stratford <neils@vipadia.com>");
}
static void
-gst_asteriskh263_init (GstAsteriskh263 * asteriskh263,
- GstAsteriskh263Class * klass)
+gst_asteriskh263_init (GstAsteriskh263 * asteriskh263)
{
asteriskh263->srcpad =
gst_pad_new_from_static_template (&gst_asteriskh263_src_template, "src");
}
static GstFlowReturn
-gst_asteriskh263_chain (GstPad * pad, GstBuffer * buf)
+gst_asteriskh263_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstAsteriskh263 *asteriskh263;
GstBuffer *outbuf;
GstFlowReturn ret;
- asteriskh263 = GST_ASTERISK_H263 (GST_OBJECT_PARENT (pad));
+ asteriskh263 = GST_ASTERISK_H263 (parent);
if (!gst_rtp_buffer_validate (buf))
goto bad_packet;
guint32 timestamp;
guint32 samples;
guint16 asterisk_len;
+ GstRTPBuffer rtp = { NULL };
+ GstMapInfo map;
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
- M = gst_rtp_buffer_get_marker (buf);
- timestamp = gst_rtp_buffer_get_timestamp (buf);
+ M = gst_rtp_buffer_get_marker (&rtp);
+ timestamp = gst_rtp_buffer_get_timestamp (&rtp);
+
+ gst_rtp_buffer_unmap (&rtp);
outbuf = gst_buffer_new_and_alloc (payload_len +
GST_ASTERISKH263_HEADER_LEN);
samples = timestamp - asteriskh263->lastts;
asteriskh263->lastts = timestamp;
- GST_ASTERISKH263_HEADER_TIMESTAMP (outbuf) = g_htonl (samples);
- GST_ASTERISKH263_HEADER_LENGTH (outbuf) = g_htons (asterisk_len);
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ GST_ASTERISKH263_HEADER_TIMESTAMP (map.data) = g_htonl (samples);
+ GST_ASTERISKH263_HEADER_LENGTH (map.data) = g_htons (asterisk_len);
/* copy the data into place */
- memcpy (GST_BUFFER_DATA (outbuf) + GST_ASTERISKH263_HEADER_LEN, payload,
- payload_len);
+ memcpy (map.data + GST_ASTERISKH263_HEADER_LEN, payload, payload_len);
+
+ gst_buffer_unmap (outbuf, &map);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
- gst_buffer_set_caps (outbuf,
- (GstCaps *) gst_pad_get_pad_template_caps (asteriskh263->srcpad));
+ if (!gst_pad_has_current_caps (asteriskh263->srcpad)) {
+ GstCaps *caps;
+
+ caps = gst_caps_copy
+ (gst_pad_get_pad_template_caps (asteriskh263->srcpad));
+ gst_pad_set_caps (asteriskh263->srcpad, caps);
+ gst_caps_unref (caps);
+ }
ret = gst_pad_push (asteriskh263->srcpad, outbuf);
#include "config.h"
#endif
-#include "gstrtpdepay.h"
#include "gstrtpac3depay.h"
#include "gstrtpac3pay.h"
#include "gstrtpbvdepay.h"
static gboolean
plugin_init (GstPlugin * plugin)
{
- if (!gst_rtp_depay_plugin_init (plugin))
- return FALSE;
-
if (!gst_rtp_ac3_depay_plugin_init (plugin))
return FALSE;
#include <stdlib.h>
#include <gst/audio/audio.h>
-#include <gst/audio/multichannel.h>
#include "gstrtpL16depay.h"
#include "gstrtpchannels.h"
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) BIG_ENDIAN, "
- "signed = (boolean) true, "
- "width = (int) 16, "
- "depth = (int) 16, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) S16_BE, "
+ "layout = (string) interleaved, "
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
);
)
);
-GST_BOILERPLATE (GstRtpL16Depay, gst_rtp_L16_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_L16_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpL16Depay, gst_rtp_L16_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_L16_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_L16_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_L16_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_L16_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_L16_depay_base_init (gpointer klass)
+gst_rtp_L16_depay_class_init (GstRtpL16DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_L16_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_L16_depay_sink_template);
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gst_element_class_set_details_simple (element_class, "RTP audio depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts raw audio from RTP packets",
- "Zeeshan Ali <zak147@yahoo.com>," "Wim Taymans <wim.taymans@gmail.com>");
-}
+ gstrtpbasedepayload_class->set_caps = gst_rtp_L16_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_L16_depay_process;
-static void
-gst_rtp_L16_depay_class_init (GstRtpL16DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L16_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L16_depay_sink_template));
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
- gstbasertpdepayload_class->set_caps = gst_rtp_L16_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_L16_depay_process;
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts raw audio from RTP packets",
+ "Zeeshan Ali <zak147@yahoo.com>," "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpL16depay_debug, "rtpL16depay", 0,
"Raw Audio RTP Depayloader");
}
static void
-gst_rtp_L16_depay_init (GstRtpL16Depay * rtpL16depay,
- GstRtpL16DepayClass * klass)
+gst_rtp_L16_depay_init (GstRtpL16Depay * rtpL16depay)
{
/* needed because of GST_BOILERPLATE */
}
}
static gboolean
-gst_rtp_L16_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_L16_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRtpL16Depay *rtpL16depay;
gboolean res;
const gchar *channel_order;
const GstRTPChannelOrder *order;
+ GstAudioInfo *info;
rtpL16depay = GST_RTP_L16_DEPAY (depayload);
}
depayload->clock_rate = clock_rate;
- rtpL16depay->rate = clock_rate;
- rtpL16depay->channels = channels;
- srccaps = gst_caps_new_simple ("audio/x-raw-int",
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "signed", G_TYPE_BOOLEAN, TRUE,
- "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16,
- "rate", G_TYPE_INT, clock_rate, "channels", G_TYPE_INT, channels, NULL);
+ info = &rtpL16depay->info;
+ gst_audio_info_init (info);
+ info->finfo = gst_audio_format_get_info (GST_AUDIO_FORMAT_S16BE);
+ info->rate = clock_rate;
+ info->channels = channels;
+ info->bpf = (info->finfo->width / 8) * channels;
/* add channel positions */
channel_order = gst_structure_get_string (structure, "channel-order");
order = gst_rtp_channels_get_by_order (channels, channel_order);
+ rtpL16depay->order = order;
if (order) {
- gst_audio_set_channel_positions (gst_caps_get_structure (srccaps, 0),
- order->pos);
+ memcpy (info->position, order->pos,
+ sizeof (GstAudioChannelPosition) * channels);
+ gst_audio_channel_positions_to_valid_order (info->position, info->channels);
} else {
- GstAudioChannelPosition *pos;
-
GST_ELEMENT_WARNING (rtpL16depay, STREAM, DECODE,
(NULL), ("Unknown channel order '%s' for %d channels",
GST_STR_NULL (channel_order), channels));
/* create default NONE layout */
- pos = gst_rtp_channels_create_default (channels);
- gst_audio_set_channel_positions (gst_caps_get_structure (srccaps, 0), pos);
- g_free (pos);
+ gst_rtp_channels_create_default (channels, info->position);
}
+ srccaps = gst_audio_info_to_caps (info);
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
}
static GstBuffer *
-gst_rtp_L16_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_L16_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpL16Depay *rtpL16depay;
GstBuffer *outbuf;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
rtpL16depay = GST_RTP_L16_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 0)
goto empty_packet;
GST_DEBUG_OBJECT (rtpL16depay, "got payload of %d bytes", payload_len);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
if (marker) {
/* mark talk spurt with DISCONT */
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
+ outbuf = gst_buffer_make_writable (outbuf);
+ if (rtpL16depay->order &&
+ !gst_audio_buffer_reorder_channels (outbuf,
+ rtpL16depay->info.finfo->format, rtpL16depay->info.channels,
+ rtpL16depay->info.position, rtpL16depay->order->pos)) {
+ goto reorder_failed;
+ }
+
+ gst_rtp_buffer_unmap (&rtp);
+
return outbuf;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpL16depay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
+ return NULL;
+ }
+reorder_failed:
+ {
+ GST_ELEMENT_ERROR (rtpL16depay, STREAM, DECODE,
+ ("Channel reordering failed."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#define __GST_RTP_L16_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpchannels.h"
G_BEGIN_DECLS
/* Definition of structure storing data for this element. */
struct _GstRtpL16Depay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
- guint rate;
- guint channels;
+ GstAudioInfo info;
+ const GstRTPChannelOrder *order;
};
/* Standard definition defining a class for this element. */
struct _GstRtpL16DepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_L16_depay_get_type (void);
#include <string.h>
#include <gst/audio/audio.h>
-#include <gst/audio/multichannel.h>
#include <gst/rtp/gstrtpbuffer.h>
#include "gstrtpL16pay.h"
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) BIG_ENDIAN, "
- "signed = (boolean) true, "
- "width = (int) 16, "
- "depth = (int) 16, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) S16_BE, "
+ "layout = (string) interleaved, "
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
);
"clock-rate = (int) 44100")
);
-static gboolean gst_rtp_L16_pay_setcaps (GstBaseRTPPayload * basepayload,
+static gboolean gst_rtp_L16_pay_setcaps (GstRTPBasePayload * basepayload,
GstCaps * caps);
-static GstCaps *gst_rtp_L16_pay_getcaps (GstBaseRTPPayload * rtppayload,
- GstPad * pad);
+static GstCaps *gst_rtp_L16_pay_getcaps (GstRTPBasePayload * rtppayload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn
+gst_rtp_L16_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpL16Pay, gst_rtp_L16_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
+#define gst_rtp_L16_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpL16Pay, gst_rtp_L16_pay, GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
static void
-gst_rtp_L16_pay_base_init (gpointer klass)
+gst_rtp_L16_pay_class_init (GstRtpL16PayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_L16_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_L16_pay_sink_template);
+ gstrtpbasepayload_class->set_caps = gst_rtp_L16_pay_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_L16_pay_getcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_L16_pay_handle_buffer;
- gst_element_class_set_details_simple (element_class, "RTP audio payloader",
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L16_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L16_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP audio payloader",
"Codec/Payloader/Network/RTP",
"Payload-encode Raw audio into RTP packets (RFC 3551)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_L16_pay_class_init (GstRtpL16PayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
-
- gstbasertppayload_class->set_caps = gst_rtp_L16_pay_setcaps;
- gstbasertppayload_class->get_caps = gst_rtp_L16_pay_getcaps;
GST_DEBUG_CATEGORY_INIT (rtpL16pay_debug, "rtpL16pay", 0,
"L16 RTP Payloader");
}
static void
-gst_rtp_L16_pay_init (GstRtpL16Pay * rtpL16pay, GstRtpL16PayClass * klass)
+gst_rtp_L16_pay_init (GstRtpL16Pay * rtpL16pay)
{
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (rtpL16pay);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpL16pay);
- /* tell basertpaudiopayload that this is a sample based codec */
- gst_base_rtp_audio_payload_set_sample_based (basertpaudiopayload);
+ /* tell rtpbaseaudiopayload that this is a sample based codec */
+ gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
}
static gboolean
-gst_rtp_L16_pay_setcaps (GstBaseRTPPayload * basepayload, GstCaps * caps)
+gst_rtp_L16_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
{
GstRtpL16Pay *rtpL16pay;
- GstStructure *structure;
- gint channels, rate;
gboolean res;
gchar *params;
- GstAudioChannelPosition *pos;
+ GstAudioInfo *info;
const GstRTPChannelOrder *order;
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (basepayload);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (basepayload);
rtpL16pay = GST_RTP_L16_PAY (basepayload);
- structure = gst_caps_get_structure (caps, 0);
-
- /* first parse input caps */
- if (!gst_structure_get_int (structure, "rate", &rate))
- goto no_rate;
-
- if (!gst_structure_get_int (structure, "channels", &channels))
- goto no_channels;
+ info = &rtpL16pay->info;
+ gst_audio_info_init (info);
+ if (!gst_audio_info_from_caps (info, caps))
+ goto invalid_caps;
- /* get the channel order */
- pos = gst_audio_get_channel_positions (structure);
- if (pos)
- order = gst_rtp_channels_get_by_pos (channels, pos);
- else
- order = NULL;
+ order = gst_rtp_channels_get_by_pos (info->channels, info->position);
+ rtpL16pay->order = order;
- gst_basertppayload_set_options (basepayload, "audio", TRUE, "L16", rate);
- params = g_strdup_printf ("%d", channels);
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "L16",
+ info->rate);
+ params = g_strdup_printf ("%d", info->channels);
- if (!order && channels > 2) {
+ if (!order && info->channels > 2) {
GST_ELEMENT_WARNING (rtpL16pay, STREAM, DECODE,
- (NULL), ("Unknown channel order for %d channels", channels));
+ (NULL), ("Unknown channel order for %d channels", info->channels));
}
if (order && order->name) {
- res = gst_basertppayload_set_outcaps (basepayload,
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
"encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
- channels, "channel-order", G_TYPE_STRING, order->name, NULL);
+ info->channels, "channel-order", G_TYPE_STRING, order->name, NULL);
} else {
- res = gst_basertppayload_set_outcaps (basepayload,
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
"encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
- channels, NULL);
+ info->channels, NULL);
}
g_free (params);
- g_free (pos);
-
- rtpL16pay->rate = rate;
- rtpL16pay->channels = channels;
/* octet-per-sample is 2 * channels for L16 */
- gst_base_rtp_audio_payload_set_sample_options (basertpaudiopayload,
- 2 * rtpL16pay->channels);
+ gst_rtp_base_audio_payload_set_sample_options (rtpbaseaudiopayload,
+ 2 * info->channels);
return res;
/* ERRORS */
-no_rate:
+invalid_caps:
{
- GST_DEBUG_OBJECT (rtpL16pay, "no rate given");
- return FALSE;
- }
-no_channels:
- {
- GST_DEBUG_OBJECT (rtpL16pay, "no channels given");
+ GST_DEBUG_OBJECT (rtpL16pay, "invalid caps");
return FALSE;
}
}
static GstCaps *
-gst_rtp_L16_pay_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
+gst_rtp_L16_pay_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
}
gst_caps_unref (otherpadcaps);
}
+
+ if (filter) {
+ GstCaps *tcaps = caps;
+
+ caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tcaps);
+ }
+
return caps;
}
+static GstFlowReturn
+gst_rtp_L16_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpL16Pay *rtpL16pay;
+
+ rtpL16pay = GST_RTP_L16_PAY (basepayload);
+ buffer = gst_buffer_make_writable (buffer);
+
+ if (rtpL16pay->order &&
+ !gst_audio_buffer_reorder_channels (buffer, rtpL16pay->info.finfo->format,
+ rtpL16pay->info.channels, rtpL16pay->info.position,
+ rtpL16pay->order->pos)) {
+ return GST_FLOW_ERROR;
+ }
+
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->handle_buffer (basepayload,
+ buffer);
+}
+
gboolean
gst_rtp_L16_pay_plugin_init (GstPlugin * plugin)
{
#define __GST_RTP_L16_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpaudiopayload.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+#include "gstrtpchannels.h"
G_BEGIN_DECLS
struct _GstRtpL16Pay
{
- GstBaseRTPAudioPayload payload;
+ GstRTPBaseAudioPayload payload;
- gint rate;
- gint channels;
+ GstAudioInfo info;
+ const GstRTPChannelOrder *order;
};
struct _GstRtpL16PayClass
{
- GstBaseRTPAudioPayloadClass parent_class;
+ GstRTPBaseAudioPayloadClass parent_class;
};
GType gst_rtp_L16_pay_get_type (void);
"encoding-name = (string) \"AC3\"")
);
-GST_BOILERPLATE (GstRtpAC3Depay, gst_rtp_ac3_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+G_DEFINE_TYPE (GstRtpAC3Depay, gst_rtp_ac3_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_ac3_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_ac3_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_ac3_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_ac3_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_ac3_depay_base_init (gpointer klass)
+gst_rtp_ac3_depay_class_init (GstRtpAC3DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_ac3_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_ac3_depay_sink_template);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ac3_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ac3_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP AC3 depayloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP AC3 depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts AC3 audio from RTP packets (RFC 4184)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_ac3_depay_class_init (GstRtpAC3DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gstbasertpdepayload_class->set_caps = gst_rtp_ac3_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_ac3_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_ac3_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_ac3_depay_process;
GST_DEBUG_CATEGORY_INIT (rtpac3depay_debug, "rtpac3depay", 0,
"AC3 Audio RTP Depayloader");
}
static void
-gst_rtp_ac3_depay_init (GstRtpAC3Depay * rtpac3depay,
- GstRtpAC3DepayClass * klass)
+gst_rtp_ac3_depay_init (GstRtpAC3Depay * rtpac3depay)
{
- /* needed because of GST_BOILERPLATE */
+ /* needed because of G_DEFINE_TYPE */
}
static gboolean
-gst_rtp_ac3_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_ac3_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
gint clock_rate;
clock_rate = 90000; /* default */
depayload->clock_rate = clock_rate;
- srccaps = gst_caps_new_simple ("audio/ac3", NULL);
+ srccaps = gst_caps_new_empty_simple ("audio/ac3");
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
};
static GstBuffer *
-gst_rtp_ac3_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_ac3_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpAC3Depay *rtpac3depay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL, };
+ guint8 *payload;
+ guint16 FT, NF;
rtpac3depay = GST_RTP_AC3_DEPAY (depayload);
- {
- guint8 *payload;
- guint16 FT, NF;
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- if (gst_rtp_buffer_get_payload_len (buf) < 2)
- goto empty_packet;
+ if (gst_rtp_buffer_get_payload_len (&rtp) < 2)
+ goto empty_packet;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
- /* strip off header
- *
- * 0 1
- * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- * | MBZ | FT| NF |
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- FT = payload[0] & 0x3;
- NF = payload[1];
+ /* strip off header
+ *
+ * 0 1
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | MBZ | FT| NF |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ FT = payload[0] & 0x3;
+ NF = payload[1];
- GST_DEBUG_OBJECT (rtpac3depay, "FT: %d, NF: %d", FT, NF);
+ GST_DEBUG_OBJECT (rtpac3depay, "FT: %d, NF: %d", FT, NF);
- /* We don't bother with fragmented packets yet */
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 2, -1);
+ /* We don't bother with fragmented packets yet */
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 2, -1);
- if (outbuf)
- GST_DEBUG_OBJECT (rtpac3depay, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_rtp_buffer_unmap (&rtp);
- return outbuf;
- }
+ if (outbuf)
+ GST_DEBUG_OBJECT (rtpac3depay, "pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
- return NULL;
+ return outbuf;
/* ERRORS */
empty_packet:
{
GST_ELEMENT_WARNING (rtpac3depay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#define __GST_RTP_AC3_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpAC3Depay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
};
struct _GstRtpAC3DepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_ac3_depay_get_type (void);
static GstStateChangeReturn gst_rtp_ac3_pay_change_state (GstElement * element,
GstStateChange transition);
-static gboolean gst_rtp_ac3_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_ac3_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static gboolean gst_rtp_ac3_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_ac3_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
static GstFlowReturn gst_rtp_ac3_pay_flush (GstRtpAC3Pay * rtpac3pay);
-static GstFlowReturn gst_rtp_ac3_pay_handle_buffer (GstBaseRTPPayload * payload,
+static GstFlowReturn gst_rtp_ac3_pay_handle_buffer (GstRTPBasePayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpAC3Pay, gst_rtp_ac3_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_ac3_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_ac3_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_ac3_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP AC3 audio payloader", "Codec/Payloader/Network/RTP",
- "Payload AC3 audio as RTP packets (RFC 4184)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_ac3_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAC3Pay, gst_rtp_ac3_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_ac3_pay_class_init (GstRtpAC3PayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpac3pay_debug, "rtpac3pay", 0,
+ "AC3 Audio RTP Depayloader");
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->finalize = gst_rtp_ac3_pay_finalize;
gstelement_class->change_state = gst_rtp_ac3_pay_change_state;
- gstbasertppayload_class->set_caps = gst_rtp_ac3_pay_setcaps;
- gstbasertppayload_class->handle_event = gst_rtp_ac3_pay_handle_event;
- gstbasertppayload_class->handle_buffer = gst_rtp_ac3_pay_handle_buffer;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ac3_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ac3_pay_sink_template));
- GST_DEBUG_CATEGORY_INIT (rtpac3pay_debug, "rtpac3pay", 0,
- "AC3 Audio RTP Depayloader");
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP AC3 audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload AC3 audio as RTP packets (RFC 4184)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_ac3_pay_setcaps;
+ gstrtpbasepayload_class->sink_event = gst_rtp_ac3_pay_sink_event;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_ac3_pay_handle_buffer;
}
static void
-gst_rtp_ac3_pay_init (GstRtpAC3Pay * rtpac3pay, GstRtpAC3PayClass * klass)
+gst_rtp_ac3_pay_init (GstRtpAC3Pay * rtpac3pay)
{
rtpac3pay->adapter = gst_adapter_new ();
}
}
static gboolean
-gst_rtp_ac3_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_ac3_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
gint rate;
if (!gst_structure_get_int (structure, "rate", &rate))
rate = 90000; /* default */
- gst_basertppayload_set_options (payload, "audio", TRUE, "AC3", rate);
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ gst_rtp_base_payload_set_options (payload, "audio", TRUE, "AC3", rate);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
return res;
}
static gboolean
-gst_rtp_ac3_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_ac3_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
{
+ gboolean res;
GstRtpAC3Pay *rtpac3pay;
- rtpac3pay = GST_RTP_AC3_PAY (gst_pad_get_parent (pad));
+ rtpac3pay = GST_RTP_AC3_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
break;
}
- gst_object_unref (rtpac3pay);
+ res = GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
- /* FALSE to let the parent handle the event as well */
- return FALSE;
+ return res;
}
struct frmsize_s
/* number of frames */
NF = rtpac3pay->NF;
- mtu = GST_BASE_RTP_PAYLOAD_MTU (rtpac3pay);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpac3pay);
GST_LOG_OBJECT (rtpac3pay, "flushing %u bytes", avail);
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL, };
/* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (2 + avail, 0, 0);
* 3: other fragment
* NF: amount of frames if FT = 0, else number of fragments.
*/
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
GST_LOG_OBJECT (rtpac3pay, "FT %u, NF %u", FT, NF);
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload[0] = (FT & 3);
payload[1] = NF;
payload_len -= 2;
avail -= payload_len;
if (avail == 0)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpac3pay->first_ts;
GST_BUFFER_DURATION (outbuf) = rtpac3pay->duration;
- ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpac3pay), outbuf);
+ ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpac3pay), outbuf);
}
return ret;
}
static GstFlowReturn
-gst_rtp_ac3_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_ac3_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpAC3Pay *rtpac3pay;
GstFlowReturn ret;
- guint size, avail, left, NF;
- guint8 *data, *p;
+ gsize avail, left, NF;
+ GstMapInfo map;
+ guint8 *p;
guint packet_len;
GstClockTime duration, timestamp;
rtpac3pay = GST_RTP_AC3_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
/* count the amount of incomming packets */
NF = 0;
- left = size;
- p = data;
+ left = map.size;
+ p = map.data;
while (TRUE) {
guint bsid, fscod, frmsizecod, frame_size;
break;
NF++;
- GST_DEBUG_OBJECT (rtpac3pay, "found frame %u of size %u", NF, frame_size);
+ GST_DEBUG_OBJECT (rtpac3pay, "found frame %" G_GSIZE_FORMAT " of size %u",
+ NF, frame_size);
p += frame_size;
left -= frame_size;
}
+ gst_buffer_unmap (buffer, &map);
if (NF == 0)
goto no_frames;
/* get packet length of previous data and this new data,
* payload length includes a 4 byte header */
- packet_len = gst_rtp_buffer_calc_packet_len (2 + avail + size, 0, 0);
+ packet_len = gst_rtp_buffer_calc_packet_len (2 + avail + map.size, 0, 0);
/* if this buffer is going to overflow the packet, flush what we
* have. */
- if (gst_basertppayload_is_filled (basepayload,
+ if (gst_rtp_base_payload_is_filled (basepayload,
packet_len, rtpac3pay->duration + duration)) {
ret = gst_rtp_ac3_pay_flush (rtpac3pay);
avail = 0;
#define __GST_RTP_AC3_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRtpAC3Pay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstAdapter *adapter;
GstClockTime first_ts;
struct _GstRtpAC3PayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_ac3_pay_get_type (void);
"audio/AMR-WB, " "channels = (int) 1," "rate = (int) 16000")
);
-static gboolean gst_rtp_amr_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_amr_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_amr_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_amr_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpAMRDepay, gst_rtp_amr_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_amr_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAMRDepay, gst_rtp_amr_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
-gst_rtp_amr_depay_base_init (gpointer klass)
+gst_rtp_amr_depay_class_init (GstRtpAMRDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_amr_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_amr_depay_sink_template);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_amr_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_amr_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP AMR depayloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP AMR depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts AMR or AMR-WB audio from RTP packets (RFC 3267)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-static void
-gst_rtp_amr_depay_class_init (GstRtpAMRDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
- gstbasertpdepayload_class->process = gst_rtp_amr_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_amr_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_amr_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_amr_depay_setcaps;
GST_DEBUG_CATEGORY_INIT (rtpamrdepay_debug, "rtpamrdepay", 0,
"AMR/AMR-WB RTP Depayloader");
}
static void
-gst_rtp_amr_depay_init (GstRtpAMRDepay * rtpamrdepay,
- GstRtpAMRDepayClass * klass)
+gst_rtp_amr_depay_init (GstRtpAMRDepay * rtpamrdepay)
{
- GstBaseRTPDepayload *depayload;
+ GstRTPBaseDepayload *depayload;
- depayload = GST_BASE_RTP_DEPAYLOAD (rtpamrdepay);
+ depayload = GST_RTP_BASE_DEPAYLOAD (rtpamrdepay);
- gst_pad_use_fixed_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload));
+ gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
}
static gboolean
-gst_rtp_amr_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_amr_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstCaps *srccaps;
srccaps = gst_caps_new_simple (type,
"channels", G_TYPE_INT, rtpamrdepay->channels,
"rate", G_TYPE_INT, clock_rate, NULL);
- res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
return res;
};
static GstBuffer *
-gst_rtp_amr_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_amr_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpAMRDepay *rtpamrdepay;
const gint *frame_size;
GstBuffer *outbuf = NULL;
gint payload_len;
+ GstRTPBuffer rtp = { NULL };
+ GstMapInfo map;
rtpamrdepay = GST_RTP_AMR_DEPAY (depayload);
else
frame_size = wb_frame_size;
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
/* when we get here, 1 channel, 8000/16000 Hz, octet aligned, no CRC,
* no robust sorting, no interleaving data is to be depayloaded */
{
gint amr_len;
gint ILL, ILP;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* need at least 2 bytes for the header */
if (payload_len < 2)
goto too_small;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* depay CMR. The CMR is used by the sender to request
* a new encoding mode.
outbuf = gst_buffer_new_and_alloc (payload_len);
/* point to destination */
- p = GST_BUFFER_DATA (outbuf);
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+
/* point to first data packet */
+ p = map.data;
dp = payload + num_packets;
if (rtpamrdepay->crc) {
/* skip CRC if present */
dp += fr_size;
}
}
+ gst_buffer_unmap (outbuf, &map);
+
/* we can set the duration because each packet is 20 milliseconds */
GST_BUFFER_DURATION (outbuf) = num_packets * 20 * GST_MSECOND;
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
/* marker bit marks a discont buffer after a talkspurt. */
GST_DEBUG_OBJECT (depayload, "marker bit was set");
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
- GST_DEBUG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ GST_DEBUG_OBJECT (depayload, "pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
}
return outbuf;
#define __GST_RTP_AMR_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpAMRDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstRtpAMRDepayMode mode;
struct _GstRtpAMRDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_amr_depay_get_type (void);
"maxptime = (int) [ 20, MAX ], " "ptime = (int) [ 20, MAX ]")
);
-static gboolean gst_rtp_amr_pay_setcaps (GstBaseRTPPayload * basepayload,
+static gboolean gst_rtp_amr_pay_setcaps (GstRTPBasePayload * basepayload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_amr_pay_handle_buffer (GstBaseRTPPayload * pad,
+static GstFlowReturn gst_rtp_amr_pay_handle_buffer (GstRTPBasePayload * pad,
GstBuffer * buffer);
static GstStateChangeReturn
gst_rtp_amr_pay_change_state (GstElement * element, GstStateChange transition);
-GST_BOILERPLATE (GstRtpAMRPay, gst_rtp_amr_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_amr_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_amr_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_amr_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP AMR payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encode AMR or AMR-WB audio into RTP packets (RFC 3267)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_amr_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAMRPay, gst_rtp_amr_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_amr_pay_class_init (GstRtpAMRPayClass * klass)
{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
gstelement_class->change_state = gst_rtp_amr_pay_change_state;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_amr_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_amr_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP AMR payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encode AMR or AMR-WB audio into RTP packets (RFC 3267)",
+ "Wim Taymans <wim.taymans@gmail.com>");
- gstbasertppayload_class->set_caps = gst_rtp_amr_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_amr_pay_handle_buffer;
+ gstrtpbasepayload_class->set_caps = gst_rtp_amr_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_amr_pay_handle_buffer;
GST_DEBUG_CATEGORY_INIT (rtpamrpay_debug, "rtpamrpay", 0,
"AMR/AMR-WB RTP Payloader");
}
static void
-gst_rtp_amr_pay_init (GstRtpAMRPay * rtpamrpay, GstRtpAMRPayClass * klass)
+gst_rtp_amr_pay_init (GstRtpAMRPay * rtpamrpay)
{
- /* needed because of GST_BOILERPLATE */
}
static void
}
static gboolean
-gst_rtp_amr_pay_setcaps (GstBaseRTPPayload * basepayload, GstCaps * caps)
+gst_rtp_amr_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
{
GstRtpAMRPay *rtpamrpay;
gboolean res;
goto wrong_type;
if (rtpamrpay->mode == GST_RTP_AMR_P_MODE_NB)
- gst_basertppayload_set_options (basepayload, "audio", TRUE, "AMR", 8000);
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "AMR", 8000);
else
- gst_basertppayload_set_options (basepayload, "audio", TRUE, "AMR-WB",
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "AMR-WB",
16000);
- res = gst_basertppayload_set_outcaps (basepayload,
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
"encoding-params", G_TYPE_STRING, "1", "octet-align", G_TYPE_STRING, "1",
/* don't set the defaults
*
};
static GstFlowReturn
-gst_rtp_amr_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_amr_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpAMRPay *rtpamrpay;
const gint *frame_size;
GstFlowReturn ret;
- guint size, payload_len;
+ guint payload_len;
+ GstMapInfo map;
GstBuffer *outbuf;
- guint8 *payload, *data, *payload_amr;
+ guint8 *payload, *ptr, *payload_amr;
GstClockTime timestamp, duration;
guint packet_len, mtu;
gint i, num_packets, num_nonempty_packets;
gint amr_len;
gboolean sid = FALSE;
+ GstRTPBuffer rtp = { NULL };
rtpamrpay = GST_RTP_AMR_PAY (basepayload);
- mtu = GST_BASE_RTP_PAYLOAD_MTU (rtpamrpay);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpamrpay);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
else
frame_size = wb_frame_size;
- GST_DEBUG_OBJECT (basepayload, "got %d bytes", size);
+ GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes", map.size);
/* FIXME, only
* octet aligned, no interleaving, single channel, no CRC,
/* first count number of packets and total amr frame size */
amr_len = num_packets = num_nonempty_packets = 0;
- for (i = 0; i < size; i++) {
+ for (i = 0; i < map.size; i++) {
guint8 FT;
gint fr_size;
- FT = (data[i] & 0x78) >> 3;
+ FT = (map.data[i] & 0x78) >> 3;
fr_size = frame_size[FT];
GST_DEBUG_OBJECT (basepayload, "frame type %d, frame size %d", FT, fr_size);
num_packets++;
i += fr_size;
}
- if (amr_len > size)
+ if (amr_len > map.size)
goto incomplete_frame;
/* we need one extra byte for the CMR, the ToC is in the input
* data */
- payload_len = size + 1;
+ payload_len = map.size + 1;
/* get packet len to check against MTU */
packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);
/* now alloc output buffer */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* copy timestamp */
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
if (GST_BUFFER_IS_DISCONT (buffer)) {
GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
gst_rtp_amr_pay_recalc_rtp_time (rtpamrpay, timestamp);
}
(num_packets * 160) << (rtpamrpay->mode == GST_RTP_AMR_P_MODE_WB);
/* get payload, this is now writable */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* 0 1 2 3 4 5 6 7
* +-+-+-+-+-+-+-+-+
/* copy data in payload, first we copy all the FTs then all
* the AMR data. The last FT has to have the F flag cleared. */
+ ptr = map.data;
for (i = 1; i <= num_packets; i++) {
guint8 FT;
gint fr_size;
* |F| FT |Q|P|P| more FT...
* +-+-+-+-+-+-+-+-+
*/
- FT = (*data & 0x78) >> 3;
+ FT = (*ptr & 0x78) >> 3;
fr_size = frame_size[FT];
if (i == num_packets)
/* last packet, clear F flag */
- payload[i] = *data & 0x7f;
+ payload[i] = *ptr & 0x7f;
else
/* set F flag */
- payload[i] = *data | 0x80;
+ payload[i] = *ptr | 0x80;
- memcpy (payload_amr, &data[1], fr_size);
+ memcpy (payload_amr, &ptr[1], fr_size);
/* all sizes are > 0 since we checked for that above */
- data += fr_size + 1;
+ ptr += fr_size + 1;
payload_amr += fr_size;
}
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
- ret = gst_basertppayload_push (basepayload, outbuf);
+ gst_rtp_buffer_unmap (&rtp);
+
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
return ret;
{
GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
(NULL), ("received AMR frame with size <= 0"));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
{
GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
(NULL), ("received incomplete AMR frames"));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
{
GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
(NULL), ("received too many AMR frames for MTU"));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
#define __GST_RTP_AMR_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRtpAMRPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstRtpAMRPayMode mode;
GstClockTime first_ts;
struct _GstRtpAMRPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_amr_pay_get_type (void);
GST_STATIC_CAPS ("audio/x-bv, " "mode = (int) { 16, 32 }")
);
-static GstBuffer *gst_rtp_bv_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_bv_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_bv_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_bv_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPBVDepay, gst_rtp_bv_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_bv_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPBVDepay, gst_rtp_bv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
-gst_rtp_bv_depay_base_init (gpointer klass)
+gst_rtp_bv_depay_class_init (GstRTPBVDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_bv_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_bv_depay_sink_template));
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_bv_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_bv_depay_sink_template);
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP BroadcomVoice depayloader", "Codec/Depayloader/Network/RTP",
"Extracts BroadcomVoice audio from RTP packets (RFC 4298)",
"Wim Taymans <wim.taymans@collabora.co.uk>");
-}
-
-static void
-gst_rtp_bv_depay_class_init (GstRTPBVDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
- gstbasertpdepayload_class->process = gst_rtp_bv_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_bv_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_bv_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_bv_depay_setcaps;
}
static void
-gst_rtp_bv_depay_init (GstRTPBVDepay * rtpbvdepay, GstRTPBVDepayClass * klass)
+gst_rtp_bv_depay_init (GstRTPBVDepay * rtpbvdepay)
{
rtpbvdepay->mode = -1;
}
static gboolean
-gst_rtp_bv_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_bv_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstRTPBVDepay *rtpbvdepay = GST_RTP_BV_DEPAY (depayload);
GstCaps *srccaps;
srccaps = gst_caps_new_simple ("audio/x-bv",
"mode", G_TYPE_INT, rtpbvdepay->mode, NULL);
- ret = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
GST_DEBUG ("set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret);
gst_caps_unref (srccaps);
}
static GstBuffer *
-gst_rtp_bv_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_bv_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf;
gboolean marker;
+ GstRTPBuffer rtp = { NULL, };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
- GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark start of talkspurt with DISCONT */
#define __GST_RTP_BV_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRTPBVDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
gint mode;
};
struct _GstRTPBVDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_bv_depay_get_type (void);
);
-static GstCaps *gst_rtp_bv_pay_sink_getcaps (GstBaseRTPPayload * payload,
- GstPad * pad);
-static gboolean gst_rtp_bv_pay_sink_setcaps (GstBaseRTPPayload * payload,
+static GstCaps *gst_rtp_bv_pay_sink_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static gboolean gst_rtp_bv_pay_sink_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPBVPay, gst_rtp_bv_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
+#define gst_rtp_bv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPBVPay, gst_rtp_bv_pay, GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
static void
-gst_rtp_bv_pay_base_init (gpointer klass)
+gst_rtp_bv_pay_class_init (GstRTPBVPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_bv_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_bv_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP BV Payloader",
- "Codec/Payloader/Network/RTP",
- "Packetize BroadcomVoice audio streams into RTP packets (RFC 4298)",
- "Wim Taymans <wim.taymans@collabora.co.uk>");
-}
+ GST_DEBUG_CATEGORY_INIT (rtpbvpay_debug, "rtpbvpay", 0,
+ "BroadcomVoice audio RTP payloader");
-static void
-gst_rtp_bv_pay_class_init (GstRTPBVPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_bv_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_bv_pay_src_template));
- gstbasertppayload_class->set_caps = gst_rtp_bv_pay_sink_setcaps;
- gstbasertppayload_class->get_caps = gst_rtp_bv_pay_sink_getcaps;
+ gst_element_class_set_details_simple (gstelement_class, "RTP BV Payloader",
+ "Codec/Payloader/Network/RTP",
+ "Packetize BroadcomVoice audio streams into RTP packets (RFC 4298)",
+ "Wim Taymans <wim.taymans@collabora.co.uk>");
- GST_DEBUG_CATEGORY_INIT (rtpbvpay_debug, "rtpbvpay", 0,
- "BroadcomVoice audio RTP payloader");
+ gstrtpbasepayload_class->set_caps = gst_rtp_bv_pay_sink_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_bv_pay_sink_getcaps;
}
static void
-gst_rtp_bv_pay_init (GstRTPBVPay * rtpbvpay, GstRTPBVPayClass * klass)
+gst_rtp_bv_pay_init (GstRTPBVPay * rtpbvpay)
{
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (rtpbvpay);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpbvpay);
rtpbvpay->mode = -1;
- /* tell basertpaudiopayload that this is a frame based codec */
- gst_base_rtp_audio_payload_set_frame_based (basertpaudiopayload);
+ /* tell rtpbaseaudiopayload that this is a frame based codec */
+ gst_rtp_base_audio_payload_set_frame_based (rtpbaseaudiopayload);
}
static gboolean
-gst_rtp_bv_pay_sink_setcaps (GstBaseRTPPayload * basertppayload, GstCaps * caps)
+gst_rtp_bv_pay_sink_setcaps (GstRTPBasePayload * rtpbasepayload, GstCaps * caps)
{
GstRTPBVPay *rtpbvpay;
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
gint mode;
GstStructure *structure;
const char *payload_name;
- rtpbvpay = GST_RTP_BV_PAY (basertppayload);
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (basertppayload);
+ rtpbvpay = GST_RTP_BV_PAY (rtpbasepayload);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpbasepayload);
structure = gst_caps_get_structure (caps, 0);
goto wrong_mode;
if (mode == 16) {
- gst_basertppayload_set_options (basertppayload, "audio", TRUE, "BV16",
+ gst_rtp_base_payload_set_options (rtpbasepayload, "audio", TRUE, "BV16",
8000);
- basertppayload->clock_rate = 8000;
+ rtpbasepayload->clock_rate = 8000;
} else {
- gst_basertppayload_set_options (basertppayload, "audio", TRUE, "BV32",
+ gst_rtp_base_payload_set_options (rtpbasepayload, "audio", TRUE, "BV32",
16000);
- basertppayload->clock_rate = 16000;
+ rtpbasepayload->clock_rate = 16000;
}
/* set options for this frame based audio codec */
- gst_base_rtp_audio_payload_set_frame_options (basertpaudiopayload,
+ gst_rtp_base_audio_payload_set_frame_options (rtpbaseaudiopayload,
mode, mode == 16 ? 10 : 20);
if (mode != rtpbvpay->mode && rtpbvpay->mode != -1)
/* we return the padtemplate caps with the mode field fixated to a value if we
* can */
static GstCaps *
-gst_rtp_bv_pay_sink_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
+gst_rtp_bv_pay_sink_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
#define __GST_RTP_BV_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpaudiopayload.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
G_BEGIN_DECLS
struct _GstRTPBVPay
{
- GstBaseRTPAudioPayload audiopayload;
+ GstRTPBaseAudioPayload audiopayload;
gint mode;
};
struct _GstRTPBVPayClass
{
- GstBaseRTPAudioPayloadClass parent_class;
+ GstRTPBaseAudioPayloadClass parent_class;
};
GType gst_rtp_bv_pay_get_type (void);
GST_STATIC_CAPS ("audio/x-celt")
);
-static GstBuffer *gst_rtp_celt_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_celt_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_celt_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_celt_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpCELTDepay, gst_rtp_celt_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_celt_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpCELTDepay, gst_rtp_celt_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
-gst_rtp_celt_depay_base_init (gpointer klass)
+gst_rtp_celt_depay_class_init (GstRtpCELTDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_celt_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_celt_depay_sink_template);
- gst_element_class_set_details_simple (element_class, "RTP CELT depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts CELT audio from RTP packets",
- "Wim Taymans <wim.taymans@gmail.com>");
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
GST_DEBUG_CATEGORY_INIT (rtpceltdepay_debug, "rtpceltdepay", 0,
"CELT RTP Depayloader");
-}
-static void
-gst_rtp_celt_depay_class_init (GstRtpCELTDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_depay_sink_template));
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP CELT depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts CELT audio from RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
- gstbasertpdepayload_class->process = gst_rtp_celt_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_celt_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_celt_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_celt_depay_setcaps;
}
static void
-gst_rtp_celt_depay_init (GstRtpCELTDepay * rtpceltdepay,
- GstRtpCELTDepayClass * klass)
+gst_rtp_celt_depay_init (GstRtpCELTDepay * rtpceltdepay)
{
}
"\045\0\0\0Depayloaded with GStreamer celtdepay\0\0\0\0";
static gboolean
-gst_rtp_celt_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_celt_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRtpCELTDepay *rtpceltdepay;
gint clock_rate, nb_channels = 0, frame_size = 0;
GstBuffer *buf;
- guint8 *data;
+ GstMapInfo map;
+ guint8 *ptr;
const gchar *params;
GstCaps *srccaps;
gboolean res;
/* construct minimal header and comment packet for the decoder */
buf = gst_buffer_new_and_alloc (60);
- data = GST_BUFFER_DATA (buf);
- memcpy (data, "CELT ", 8);
- data += 8;
- memcpy (data, "1.1.12", 7);
- data += 20;
- GST_WRITE_UINT32_LE (data, 0x80000006); /* version */
- data += 4;
- GST_WRITE_UINT32_LE (data, 56); /* header_size */
- data += 4;
- GST_WRITE_UINT32_LE (data, clock_rate); /* rate */
- data += 4;
- GST_WRITE_UINT32_LE (data, nb_channels); /* channels */
- data += 4;
- GST_WRITE_UINT32_LE (data, frame_size); /* frame-size */
- data += 4;
- GST_WRITE_UINT32_LE (data, -1); /* overlap */
- data += 4;
- GST_WRITE_UINT32_LE (data, -1); /* bytes_per_packet */
- data += 4;
- GST_WRITE_UINT32_LE (data, 0); /* extra headers */
-
- srccaps = gst_caps_new_simple ("audio/x-celt", NULL);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ ptr = map.data;
+ memcpy (ptr, "CELT ", 8);
+ ptr += 8;
+ memcpy (ptr, "1.1.12", 7);
+ ptr += 20;
+ GST_WRITE_UINT32_LE (ptr, 0x80000006); /* version */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, 56); /* header_size */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, clock_rate); /* rate */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, nb_channels); /* channels */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, frame_size); /* frame-size */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, -1); /* overlap */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, -1); /* bytes_per_packet */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, 0); /* extra headers */
+ gst_buffer_unmap (buf, &map);
+
+ srccaps = gst_caps_new_empty_simple ("audio/x-celt");
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpceltdepay), buf);
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpceltdepay), buf);
buf = gst_buffer_new_and_alloc (sizeof (gst_rtp_celt_comment));
- memcpy (GST_BUFFER_DATA (buf), gst_rtp_celt_comment,
- sizeof (gst_rtp_celt_comment));
+ gst_buffer_fill (buf, 0, gst_rtp_celt_comment, sizeof (gst_rtp_celt_comment));
- gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpceltdepay), buf);
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpceltdepay), buf);
return res;
}
static GstBuffer *
-gst_rtp_celt_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_celt_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf = NULL;
guint8 *payload;
GstClockTime framesize_ns = 0, timestamp;
guint n = 0;
GstRtpCELTDepay *rtpceltdepay;
+ GstRTPBuffer rtp = { NULL, };
rtpceltdepay = GST_RTP_CELT_DEPAY (depayload);
clock_rate = depayload->clock_rate;
timestamp = GST_BUFFER_TIMESTAMP (buf);
- GST_LOG_OBJECT (depayload, "got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf),
- gst_rtp_buffer_get_marker (buf),
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ GST_LOG_OBJECT (depayload,
+ "got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (buf), gst_rtp_buffer_get_marker (&rtp),
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
GST_LOG_OBJECT (depayload, "got clock-rate=%d, frame_size=%d, "
"_ns=%" GST_TIME_FORMAT ", timestamp=%" GST_TIME_FORMAT, clock_rate,
frame_size, GST_TIME_ARGS (framesize_ns), GST_TIME_ARGS (timestamp));
- payload = gst_rtp_buffer_get_payload (buf);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* first count how many bytes are consumed by the size headers and make offset
* point to the first data byte */
total_size += size + 1;
} while (s == 0xff);
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, offset, size);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, offset, size);
offset += size;
if (frame_size != -1 && clock_rate != -1) {
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
- gst_base_rtp_depayload_push (depayload, outbuf);
+ gst_rtp_base_depayload_push (depayload, outbuf);
}
+ gst_rtp_buffer_unmap (&rtp);
+
return NULL;
}
#define __GST_RTP_CELT_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpCELTDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
gint frame_size;
};
struct _GstRtpCELTDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_celt_depay_get_type (void);
static GstStateChangeReturn gst_rtp_celt_pay_change_state (GstElement *
element, GstStateChange transition);
-static gboolean gst_rtp_celt_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_celt_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstCaps *gst_rtp_celt_pay_getcaps (GstBaseRTPPayload * payload,
- GstPad * pad);
-static GstFlowReturn gst_rtp_celt_pay_handle_buffer (GstBaseRTPPayload *
+static GstCaps *gst_rtp_celt_pay_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn gst_rtp_celt_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpCELTPay, gst_rtp_celt_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_celt_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_celt_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_celt_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP CELT payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes CELT audio into a RTP packet",
- "Wim Taymans <wim.taymans@gmail.com>");
-
- GST_DEBUG_CATEGORY_INIT (rtpceltpay_debug, "rtpceltpay", 0,
- "CELT RTP Payloader");
-}
+#define gst_rtp_celt_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpCELTPay, gst_rtp_celt_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_celt_pay_class_init (GstRtpCELTPayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpceltpay_debug, "rtpceltpay", 0,
+ "CELT RTP Payloader");
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->finalize = gst_rtp_celt_pay_finalize;
gstelement_class->change_state = gst_rtp_celt_pay_change_state;
- gstbasertppayload_class->set_caps = gst_rtp_celt_pay_setcaps;
- gstbasertppayload_class->get_caps = gst_rtp_celt_pay_getcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_celt_pay_handle_buffer;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP CELT payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes CELT audio into a RTP packet",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_celt_pay_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_celt_pay_getcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_celt_pay_handle_buffer;
}
static void
-gst_rtp_celt_pay_init (GstRtpCELTPay * rtpceltpay, GstRtpCELTPayClass * klass)
+gst_rtp_celt_pay_init (GstRtpCELTPay * rtpceltpay)
{
rtpceltpay->queue = g_queue_new ();
}
}
static gboolean
-gst_rtp_celt_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_celt_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
/* don't configure yet, we wait for the ident packet */
return TRUE;
static GstCaps *
-gst_rtp_celt_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
+gst_rtp_celt_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
{
guint32 version, header_size, rate, nb_channels, frame_size, overlap;
guint32 bytes_per_packet;
- GstBaseRTPPayload *payload;
+ GstRTPBasePayload *payload;
gchar *cstr, *fsstr;
gboolean res;
GST_DEBUG_OBJECT (rtpceltpay, "overlap %d, bytes_per_packet %d",
overlap, bytes_per_packet);
- payload = GST_BASE_RTP_PAYLOAD (rtpceltpay);
+ payload = GST_RTP_BASE_PAYLOAD (rtpceltpay);
- gst_basertppayload_set_options (payload, "audio", FALSE, "CELT", rate);
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE, "CELT", rate);
cstr = g_strdup_printf ("%d", nb_channels);
fsstr = g_strdup_printf ("%d", frame_size);
- res = gst_basertppayload_set_outcaps (payload, "encoding-params",
+ res = gst_rtp_base_payload_set_outcaps (payload, "encoding-params",
G_TYPE_STRING, cstr, "frame-size", G_TYPE_STRING, fsstr, NULL);
g_free (cstr);
g_free (fsstr);
guint8 *payload, *spayload;
guint payload_len;
GstClockTime duration;
+ GstRTPBuffer rtp = { NULL, };
payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
duration = rtpceltpay->qduration;
GST_BUFFER_DURATION (outbuf) = duration;
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* point to the payload for size headers and data */
- spayload = gst_rtp_buffer_get_payload (outbuf);
+ spayload = gst_rtp_buffer_get_payload (&rtp);
payload = spayload + rtpceltpay->sbytes;
while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
/* write the size to the header */
- size = GST_BUFFER_SIZE (buf);
+ size = gst_buffer_get_size (buf);
while (size > 0xff) {
*spayload++ = 0xff;
size -= 0xff;
}
*spayload++ = size;
- size = GST_BUFFER_SIZE (buf);
/* copy payload */
- memcpy (payload, GST_BUFFER_DATA (buf), size);
+ size = gst_buffer_get_size (buf);
+ gst_buffer_extract (buf, 0, payload, size);
payload += size;
gst_buffer_unref (buf);
}
+ gst_rtp_buffer_unmap (&rtp);
/* we consumed it all */
rtpceltpay->bytes = 0;
rtpceltpay->sbytes = 0;
rtpceltpay->qduration = 0;
- ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpceltpay), outbuf);
+ ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpceltpay), outbuf);
return ret;
}
static GstFlowReturn
-gst_rtp_celt_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_celt_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstFlowReturn ret;
GstRtpCELTPay *rtpceltpay;
- guint size, payload_len;
- guint8 *data;
+ gsize payload_len;
+ GstMapInfo map;
GstClockTime duration, packet_dur;
guint i, ssize, packet_len;
ret = GST_FLOW_OK;
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
switch (rtpceltpay->packet) {
case 0:
/* ident packet. We need to parse the headers to construct the RTP
* properties. */
- if (!gst_rtp_celt_pay_parse_ident (rtpceltpay, data, size))
+ if (!gst_rtp_celt_pay_parse_ident (rtpceltpay, map.data, map.size))
goto parse_error;
- goto done;
+ goto cleanup;
case 1:
/* comment packet, we ignore it */
- goto done;
+ goto cleanup;
default:
/* other packets go in the payload */
break;
}
+ gst_buffer_unmap (buffer, &map);
duration = GST_BUFFER_DURATION (buffer);
GST_LOG_OBJECT (rtpceltpay,
- "got buffer of duration %" GST_TIME_FORMAT ", size %u",
- GST_TIME_ARGS (duration), size);
+ "got buffer of duration %" GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT,
+ GST_TIME_ARGS (duration), map.size);
/* calculate the size of the size field and the payload */
ssize = 1;
- for (i = size; i > 0xff; i -= 0xff)
+ for (i = map.size; i > 0xff; i -= 0xff)
ssize++;
GST_DEBUG_OBJECT (rtpceltpay, "bytes for size %u", ssize);
/* calculate what the new size and duration would be of the packet */
- payload_len = ssize + size + rtpceltpay->bytes + rtpceltpay->sbytes;
+ payload_len = ssize + map.size + rtpceltpay->bytes + rtpceltpay->sbytes;
if (rtpceltpay->qduration != -1 && duration != -1)
packet_dur = rtpceltpay->qduration + duration;
else
packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);
- if (gst_basertppayload_is_filled (basepayload, packet_len, packet_dur)) {
+ if (gst_rtp_base_payload_is_filled (basepayload, packet_len, packet_dur)) {
/* size or duration would overflow the packet, flush the queued data */
ret = gst_rtp_celt_pay_flush_queued (rtpceltpay);
}
/* queue the packet */
- gst_rtp_celt_pay_add_queued (rtpceltpay, buffer, ssize, size, duration);
+ gst_rtp_celt_pay_add_queued (rtpceltpay, buffer, ssize, map.size, duration);
done:
rtpceltpay->packet++;
return ret;
/* ERRORS */
+cleanup:
+ {
+ gst_buffer_unmap (buffer, &map);
+ goto done;
+ }
parse_error:
{
GST_ELEMENT_ERROR (rtpceltpay, STREAM, DECODE, (NULL),
("Error parsing first identification packet."));
+ gst_buffer_unmap (buffer, &map);
return GST_FLOW_ERROR;
}
}
#define __GST_RTP_CELT_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
G_BEGIN_DECLS
struct _GstRtpCELTPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
guint64 packet;
struct _GstRtpCELTPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_celt_pay_get_type (void);
check_channels (const GstRTPChannelOrder * order,
const GstAudioChannelPosition * pos)
{
- gint i;
+ gint i, j;
gboolean res = TRUE;
for (i = 0; i < order->channels; i++) {
- if (order->pos[i] != pos[i]) {
- res = FALSE;
- break;
+ for (j = 0; j < order->channels; j++) {
+ if (order->pos[j] == pos[i])
+ break;
}
+ if (j == order->channels)
+ return FALSE;
}
return res;
}
* Returns: a #GstAudioChannelPosition with all the channel position info set to
* #GST_AUDIO_CHANNEL_POSITION_NONE.
*/
-GstAudioChannelPosition *
-gst_rtp_channels_create_default (gint channels)
+void
+gst_rtp_channels_create_default (gint channels, GstAudioChannelPosition * posn)
{
gint i;
- GstAudioChannelPosition *posn;
- g_return_val_if_fail (channels > 0, NULL);
-
- posn = g_new (GstAudioChannelPosition, channels);
+ g_return_if_fail (channels > 0);
for (i = 0; i < channels; i++)
posn[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
-
- return posn;
}
#include <stdlib.h>
#include <gst/audio/audio.h>
-#include <gst/audio/multichannel.h>
+
+#ifndef __GST_RTP_CHANNELS_H__
+#define __GST_RTP_CHANNELS_H__
typedef struct
{
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE
+ GST_AUDIO_CHANNEL_POSITION_LFE1
};
static const GstAudioChannelPosition pos_4_3[] = {
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE
+ GST_AUDIO_CHANNEL_POSITION_LFE1
};
static const GstAudioChannelPosition pos_5_1[] = {
GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE
+ GST_AUDIO_CHANNEL_POSITION_LFE1
};
static const GstAudioChannelPosition pos_6_2[] = {
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT
};
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
- GST_AUDIO_CHANNEL_POSITION_LFE,
+ GST_AUDIO_CHANNEL_POSITION_LFE1,
GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
};
static const GstAudioChannelPosition pos_def_1[] = {
- GST_AUDIO_CHANNEL_POSITION_FRONT_MONO
+ GST_AUDIO_CHANNEL_POSITION_MONO
};
static const GstAudioChannelPosition pos_def_2[] = {
GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
- GST_AUDIO_CHANNEL_POSITION_LFE
+ GST_AUDIO_CHANNEL_POSITION_LFE1
};
static const GstAudioChannelPosition pos_def_5[] = {
GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
- GST_AUDIO_CHANNEL_POSITION_LFE
+ GST_AUDIO_CHANNEL_POSITION_LFE1
};
static const GstRTPChannelOrder channel_orders[] =
const gchar *order);
const GstRTPChannelOrder * gst_rtp_channels_get_by_index (gint channels, guint idx);
-GstAudioChannelPosition * gst_rtp_channels_create_default (gint channels);
+void gst_rtp_channels_create_default (gint channels, GstAudioChannelPosition *pos);
+
+#endif /* __GST_RTP_CHANNELS_H__ */
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/* Element-Checklist-Version: 5 */
-
-#include "gstrtpdepay.h"
-
-GST_DEBUG_CATEGORY_STATIC (rtpdepay_debug);
-#define GST_CAT_DEFAULT (rtpdepay_debug)
-
-static GstStaticPadTemplate gst_rtp_depay_src_rtp_template =
-GST_STATIC_PAD_TEMPLATE ("srcrtp",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("application/x-rtp")
- );
-
-static GstStaticPadTemplate gst_rtp_depay_src_rtcp_template =
-GST_STATIC_PAD_TEMPLATE ("srcrtcp",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("application/x-rtcp")
- );
-
-static GstStaticPadTemplate gst_rtp_depay_sink_rtp_template =
-GST_STATIC_PAD_TEMPLATE ("sinkrtp",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("application/x-rtp")
- );
-
-static GstStaticPadTemplate gst_rtp_depay_sink_rtcp_template =
-GST_STATIC_PAD_TEMPLATE ("sinkrtcp",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("application/x-rtcp")
- );
-
-static GstCaps *gst_rtp_depay_getcaps (GstPad * pad);
-static GstFlowReturn gst_rtp_depay_chain_rtp (GstPad * pad, GstBuffer * buffer);
-static GstFlowReturn gst_rtp_depay_chain_rtcp (GstPad * pad,
- GstBuffer * buffer);
-
-GST_BOILERPLATE (GstRTPDepay, gst_rtp_depay, GstElement, GST_TYPE_ELEMENT);
-
-static void
-gst_rtp_depay_base_init (gpointer klass)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (gstelement_class,
- &gst_rtp_depay_src_rtp_template);
- gst_element_class_add_static_pad_template (gstelement_class,
- &gst_rtp_depay_src_rtcp_template);
- gst_element_class_add_static_pad_template (gstelement_class,
- &gst_rtp_depay_sink_rtp_template);
- gst_element_class_add_static_pad_template (gstelement_class,
- &gst_rtp_depay_sink_rtcp_template);
- gst_element_class_set_details_simple (gstelement_class,
- "Dummy RTP session manager", "Codec/Depayloader/Network/RTP",
- "Accepts raw RTP and RTCP packets and sends them forward",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_depay_class_init (GstRTPDepayClass * klass)
-{
- GST_DEBUG_CATEGORY_INIT (rtpdepay_debug, "rtpdepay", 0, "RTP decoder");
-}
-
-static void
-gst_rtp_depay_init (GstRTPDepay * rtpdepay, GstRTPDepayClass * klass)
-{
- /* the input rtp pad */
- rtpdepay->sink_rtp =
- gst_pad_new_from_static_template (&gst_rtp_depay_sink_rtp_template,
- "sinkrtp");
- gst_element_add_pad (GST_ELEMENT (rtpdepay), rtpdepay->sink_rtp);
- gst_pad_set_getcaps_function (rtpdepay->sink_rtp, gst_rtp_depay_getcaps);
- gst_pad_set_chain_function (rtpdepay->sink_rtp, gst_rtp_depay_chain_rtp);
-
- /* the input rtcp pad */
- rtpdepay->sink_rtcp =
- gst_pad_new_from_static_template (&gst_rtp_depay_sink_rtcp_template,
- "sinkrtcp");
- gst_element_add_pad (GST_ELEMENT (rtpdepay), rtpdepay->sink_rtcp);
- gst_pad_set_chain_function (rtpdepay->sink_rtcp, gst_rtp_depay_chain_rtcp);
-
- /* the output rtp pad */
- rtpdepay->src_rtp =
- gst_pad_new_from_static_template (&gst_rtp_depay_src_rtp_template,
- "srcrtp");
- gst_pad_set_getcaps_function (rtpdepay->src_rtp, gst_rtp_depay_getcaps);
- gst_element_add_pad (GST_ELEMENT (rtpdepay), rtpdepay->src_rtp);
-
- /* the output rtcp pad */
- rtpdepay->src_rtcp =
- gst_pad_new_from_static_template (&gst_rtp_depay_src_rtcp_template,
- "srcrtcp");
- gst_element_add_pad (GST_ELEMENT (rtpdepay), rtpdepay->src_rtcp);
-}
-
-static GstCaps *
-gst_rtp_depay_getcaps (GstPad * pad)
-{
- GstRTPDepay *src;
- GstPad *other;
- GstCaps *caps;
-
- src = GST_RTP_DEPAY (GST_PAD_PARENT (pad));
-
- other = pad == src->src_rtp ? src->sink_rtp : src->src_rtp;
-
- caps = gst_pad_peer_get_caps (other);
-
- if (caps == NULL)
- caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
-
- return caps;
-}
-
-static GstFlowReturn
-gst_rtp_depay_chain_rtp (GstPad * pad, GstBuffer * buffer)
-{
- GstRTPDepay *src;
-
- src = GST_RTP_DEPAY (GST_PAD_PARENT (pad));
-
- GST_DEBUG ("got rtp packet");
- return gst_pad_push (src->src_rtp, buffer);
-}
-
-static GstFlowReturn
-gst_rtp_depay_chain_rtcp (GstPad * pad, GstBuffer * buffer)
-{
- GST_DEBUG ("got rtcp packet");
-
- gst_buffer_unref (buffer);
- return GST_FLOW_OK;
-}
-
-gboolean
-gst_rtp_depay_plugin_init (GstPlugin * plugin)
-{
- return gst_element_register (plugin, "rtpdepay",
- GST_RANK_SECONDARY, GST_TYPE_RTP_DEPAY);
-}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_RTP_DEPAY_H__
-#define __GST_RTP_DEPAY_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_RTP_DEPAY (gst_rtp_depay_get_type())
-#define GST_IS_RTP_DEPAY(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_DEPAY))
-#define GST_IS_RTP_DEPAY_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_DEPAY))
-#define GST_RTP_DEPAY(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_DEPAY, GstRTPDepay))
-#define GST_RTP_DEPAY_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_DEPAY, GstRTPDepayClass))
-
-typedef struct _GstRTPDepay GstRTPDepay;
-typedef struct _GstRTPDepayClass GstRTPDepayClass;
-
-struct _GstRTPDepay {
- GstElement element;
-
- GstPad *sink_rtp;
- GstPad *sink_rtcp;
- GstPad *src_rtp;
- GstPad *src_rtcp;
-};
-
-struct _GstRTPDepayClass {
- GstElementClass parent_class;
-};
-
-gboolean gst_rtp_depay_plugin_init (GstPlugin * plugin);
-
-GType gst_rtp_depay_get_type(void);
-
-G_END_DECLS
-
-#endif /* __GST_RTP_DEPAY_H__ */
static GstStateChangeReturn
gst_rtp_dv_depay_change_state (GstElement * element, GstStateChange transition);
-static GstBuffer *gst_rtp_dv_depay_process (GstBaseRTPDepayload * base,
+static GstBuffer *gst_rtp_dv_depay_process (GstRTPBaseDepayload * base,
GstBuffer * in);
-static gboolean gst_rtp_dv_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_dv_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPDVDepay, gst_rtp_dv_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD)
+#define gst_rtp_dv_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDVDepay, gst_rtp_dv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
- static void gst_rtp_dv_depay_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
-
- gst_element_class_set_details_simple (element_class, "RTP DV Depayloader",
- "Codec/Depayloader/Network/RTP",
- "Depayloads DV from RTP packets (RFC 3189)",
- "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
-}
-/* initialize the plugin's class */
static void
gst_rtp_dv_depay_class_init (GstRTPDVDepayClass * klass)
{
GstElementClass *gstelement_class = (GstElementClass *) klass;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class =
- (GstBaseRTPDepayloadClass *) klass;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class =
+ (GstRTPBaseDepayloadClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (rtpdvdepay_debug, "rtpdvdepay", 0,
+ "DV RTP Depayloader");
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_change_state);
- gstbasertpdepayload_class->process =
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP DV Depayloader",
+ "Codec/Depayloader/Network/RTP",
+ "Depayloads DV from RTP packets (RFC 3189)",
+ "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasedepayload_class->process =
GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_process);
- gstbasertpdepayload_class->set_caps =
+ gstrtpbasedepayload_class->set_caps =
GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_setcaps);
-
- GST_DEBUG_CATEGORY_INIT (rtpdvdepay_debug, "rtpdvdepay", 0,
- "DV RTP Depayloader");
}
/* initialize the new element
* initialize structure
*/
static void
-gst_rtp_dv_depay_init (GstRTPDVDepay * filter, GstRTPDVDepayClass * klass)
+gst_rtp_dv_depay_init (GstRTPDVDepay * filter)
{
}
}
static gboolean
-gst_rtp_dv_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_dv_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRTPDVDepay *rtpdvdepay;
/* Initialize the new accumulator frame.
* If the previous frame exists, copy that into the accumulator frame.
* This way, missing packets in the stream won't show up badly. */
- memset (GST_BUFFER_DATA (rtpdvdepay->acc), 0, rtpdvdepay->frame_size);
+ gst_buffer_memset (rtpdvdepay->acc, 0, 0, rtpdvdepay->frame_size);
srccaps = gst_caps_new_simple ("video/x-dv",
"systemstream", G_TYPE_BOOLEAN, systemstream,
* NTSC.
*/
static GstBuffer *
-gst_rtp_dv_depay_process (GstBaseRTPDepayload * base, GstBuffer * in)
+gst_rtp_dv_depay_process (GstRTPBaseDepayload * base, GstBuffer * in)
{
GstBuffer *out = NULL;
guint8 *payload;
guint payload_len, location;
GstRTPDVDepay *dvdepay = GST_RTP_DV_DEPAY (base);
gboolean marker;
+ GstRTPBuffer rtp = { NULL, };
+
+ gst_rtp_buffer_map (in, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (in);
+ marker = gst_rtp_buffer_get_marker (&rtp);
/* Check if the received packet contains (the start of) a new frame, we do
* this by checking the RTP timestamp. */
- rtp_ts = gst_rtp_buffer_get_timestamp (in);
+ rtp_ts = gst_rtp_buffer_get_timestamp (&rtp);
/* we cannot copy the packet yet if the marker is set, we will do that below
* after taking out the data */
}
/* Extract the payload */
- payload_len = gst_rtp_buffer_get_payload_len (in);
- payload = gst_rtp_buffer_get_payload (in);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* copy all DIF chunks in their place. */
while (payload_len >= 80) {
/* And copy it in, provided the location is sane. */
if (offset >= 0 && offset <= dvdepay->frame_size - 80)
- memcpy (GST_BUFFER_DATA (dvdepay->acc) + offset, payload, 80);
+ gst_buffer_fill (dvdepay->acc, offset, payload, 80);
payload += 80;
payload_len -= 80;
}
+ gst_rtp_buffer_unmap (&rtp);
if (marker) {
GST_DEBUG_OBJECT (dvdepay, "marker bit complete frame %u", rtp_ts);
#define __GSTRTPDVDEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRTPDVDepay
{
- GstBaseRTPDepayload parent;
+ GstRTPBaseDepayload parent;
GstBuffer *acc;
guint frame_size;
struct _GstRTPDVDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_dv_depay_get_type (void);
)
);
-static gboolean gst_rtp_dv_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_dv_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_dv_pay_handle_buffer (GstBaseRTPPayload * payload,
+static GstFlowReturn gst_rtp_dv_pay_handle_buffer (GstRTPBasePayload * payload,
GstBuffer * buffer);
#define GST_TYPE_DV_PAY_MODE (gst_dv_pay_mode_get_type())
static void gst_dv_pay_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstRTPDVPay, gst_rtp_dv_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_dv_pay_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_dv_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_dv_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP DV Payloader",
- "Codec/Payloader/Network/RTP",
- "Payloads DV into RTP packets (RFC 3189)",
- "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_dv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDVPay, gst_rtp_dv_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_dv_pay_class_init (GstRTPDVPayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpdvpay_debug, "rtpdvpay", 0, "DV RTP Payloader");
gobject_class = (GObjectClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->set_property = gst_dv_pay_set_property;
gobject_class->get_property = gst_dv_pay_get_property;
- gstbasertppayload_class->set_caps = gst_rtp_dv_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_dv_pay_handle_buffer;
-
g_object_class_install_property (gobject_class, PROP_MODE,
g_param_spec_enum ("mode", "Mode",
"The payload mode of payloading",
GST_TYPE_DV_PAY_MODE, DEFAULT_MODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- GST_DEBUG_CATEGORY_INIT (rtpdvpay_debug, "rtpdvpay", 0, "DV RTP Payloader");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dv_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dv_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP DV Payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payloads DV into RTP packets (RFC 3189)",
+ "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_dv_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_dv_pay_handle_buffer;
}
static void
-gst_rtp_dv_pay_init (GstRTPDVPay * rtpdvpay, GstRTPDVPayClass * klass)
+gst_rtp_dv_pay_init (GstRTPDVPay * rtpdvpay)
{
}
}
static gboolean
-gst_rtp_dv_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_dv_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
/* We don't do anything here, but we could check if it's a system stream and if
* it's not, default to sending the video only. We will negotiate downstream
}
static gboolean
-gst_dv_pay_negotiate (GstRTPDVPay * rtpdvpay, guint8 * data, guint size)
+gst_dv_pay_negotiate (GstRTPDVPay * rtpdvpay, guint8 * data, gsize size)
{
const gchar *encode, *media;
gboolean audio_bundled, res;
default:
break;
}
- gst_basertppayload_set_options (GST_BASE_RTP_PAYLOAD (rtpdvpay), media, TRUE,
- "DV", 90000);
+ gst_rtp_base_payload_set_options (GST_RTP_BASE_PAYLOAD (rtpdvpay), media,
+ TRUE, "DV", 90000);
if (audio_bundled) {
- res = gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpdvpay),
+ res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpdvpay),
"encode", G_TYPE_STRING, encode,
"audio", G_TYPE_STRING, "bundled", NULL);
} else {
- res = gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpdvpay),
+ res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpdvpay),
"encode", G_TYPE_STRING, encode, NULL);
}
return res;
/* Get a DV frame, chop it up in pieces, and push the pieces to the RTP layer.
*/
static GstFlowReturn
-gst_rtp_dv_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_dv_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRTPDVPay *rtpdvpay;
GstBuffer *outbuf;
GstFlowReturn ret = GST_FLOW_OK;
gint hdrlen;
- guint size;
+ gsize size;
+ GstMapInfo map;
guint8 *data;
guint8 *dest;
guint filled;
+ GstRTPBuffer rtp = { NULL, };
rtpdvpay = GST_RTP_DV_PAY (basepayload);
* Therefore, we round the available room down to the nearest multiple of 80.
*
* The available room is just the packet MTU, minus the RTP header length. */
- max_payload_size = ((GST_BASE_RTP_PAYLOAD_MTU (rtpdvpay) - hdrlen) / 80) * 80;
+ max_payload_size = ((GST_RTP_BASE_PAYLOAD_MTU (rtpdvpay) - hdrlen) / 80) * 80;
/* The length of the buffer to transmit. */
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
GST_DEBUG_OBJECT (rtpdvpay,
- "DV RTP payloader got buffer of %u bytes, splitting in %u byte "
- "payload fragments, at time %" GST_TIME_FORMAT, size, max_payload_size,
+ "DV RTP payloader got buffer of %" G_GSIZE_FORMAT
+ " bytes, splitting in %u byte " "payload fragments, at time %"
+ GST_TIME_FORMAT, size, max_payload_size,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
if (!rtpdvpay->negotiated) {
if (outbuf == NULL) {
outbuf = gst_rtp_buffer_new_allocate (max_payload_size, 0, 0);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
- dest = gst_rtp_buffer_get_payload (outbuf);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ dest = gst_rtp_buffer_get_payload (&rtp);
filled = 0;
}
guint hlen;
/* set marker */
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
/* shrink buffer to last packet */
- hlen = gst_rtp_buffer_get_header_len (outbuf);
- gst_rtp_buffer_set_packet_len (outbuf, hlen + filled);
+ hlen = gst_rtp_buffer_get_header_len (&rtp);
+ gst_rtp_buffer_set_packet_len (&rtp, hlen + filled);
}
+
/* Push out the created piece, and check for errors. */
- ret = gst_basertppayload_push (basepayload, outbuf);
+ gst_rtp_buffer_unmap (&rtp);
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
if (ret != GST_FLOW_OK)
break;
outbuf = NULL;
}
}
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return ret;
#define __GSTRTPDVPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
G_BEGIN_DECLS
struct _GstRTPDVPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
gboolean negotiated;
GstDVPayMode mode;
struct _GstRTPDVPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_dv_pay_get_type (void);
#include <stdlib.h>
#include <gst/audio/audio.h>
-#include <gst/audio/multichannel.h>
#include "gstrtpg722depay.h"
#include "gstrtpchannels.h"
)
);
-GST_BOILERPLATE (GstRtpG722Depay, gst_rtp_g722_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_g722_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG722Depay, gst_rtp_g722_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_g722_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_g722_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_g722_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_g722_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_g722_depay_base_init (gpointer klass)
+gst_rtp_g722_depay_class_init (GstRtpG722DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g722_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g722_depay_sink_template);
+ GST_DEBUG_CATEGORY_INIT (rtpg722depay_debug, "rtpg722depay", 0,
+ "G722 RTP Depayloader");
- gst_element_class_set_details_simple (element_class, "RTP audio depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts G722 audio from RTP packets",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
-static void
-gst_rtp_g722_depay_class_init (GstRtpG722DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g722_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g722_depay_sink_template));
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
- gstbasertpdepayload_class->set_caps = gst_rtp_g722_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_g722_depay_process;
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts G722 audio from RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
- GST_DEBUG_CATEGORY_INIT (rtpg722depay_debug, "rtpg722depay", 0,
- "G722 RTP Depayloader");
+ gstrtpbasedepayload_class->set_caps = gst_rtp_g722_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_g722_depay_process;
}
static void
-gst_rtp_g722_depay_init (GstRtpG722Depay * rtpg722depay,
- GstRtpG722DepayClass * klass)
+gst_rtp_g722_depay_init (GstRtpG722Depay * rtpg722depay)
{
- /* needed because of GST_BOILERPLATE */
}
static gint
}
static gboolean
-gst_rtp_g722_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_g722_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRtpG722Depay *rtpg722depay;
gint channels;
GstCaps *srccaps;
gboolean res;
+#if 0
const gchar *channel_order;
const GstRTPChannelOrder *order;
+#endif
rtpg722depay = GST_RTP_G722_DEPAY (depayload);
srccaps = gst_caps_new_simple ("audio/G722",
"rate", G_TYPE_INT, samplerate, "channels", G_TYPE_INT, channels, NULL);
+ /* FIXME: Do something with the channel order */
+#if 0
/* add channel positions */
channel_order = gst_structure_get_string (structure, "channel-order");
gst_audio_set_channel_positions (gst_caps_get_structure (srccaps, 0), pos);
g_free (pos);
}
+#endif
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
}
static GstBuffer *
-gst_rtp_g722_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_g722_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpG722Depay *rtpg722depay;
GstBuffer *outbuf;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
rtpg722depay = GST_RTP_G722_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 0)
goto empty_packet;
GST_DEBUG_OBJECT (rtpg722depay, "got payload of %d bytes", payload_len);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark talk spurt with DISCONT */
{
GST_ELEMENT_WARNING (rtpg722depay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#define __GST_RTP_G722_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
/* Definition of structure storing data for this element. */
struct _GstRtpG722Depay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
guint rate;
guint channels;
/* Standard definition defining a class for this element. */
struct _GstRtpG722DepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_g722_depay_get_type (void);
#include <string.h>
#include <gst/audio/audio.h>
-#include <gst/audio/multichannel.h>
#include <gst/rtp/gstrtpbuffer.h>
#include "gstrtpg722pay.h"
"clock-rate = (int) 8000")
);
-static gboolean gst_rtp_g722_pay_setcaps (GstBaseRTPPayload * basepayload,
+static gboolean gst_rtp_g722_pay_setcaps (GstRTPBasePayload * basepayload,
GstCaps * caps);
-static GstCaps *gst_rtp_g722_pay_getcaps (GstBaseRTPPayload * rtppayload,
- GstPad * pad);
+static GstCaps *gst_rtp_g722_pay_getcaps (GstRTPBasePayload * rtppayload,
+ GstPad * pad, GstCaps * filter);
-GST_BOILERPLATE (GstRtpG722Pay, gst_rtp_g722_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
+#define gst_rtp_g722_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG722Pay, gst_rtp_g722_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
static void
-gst_rtp_g722_pay_base_init (gpointer klass)
+gst_rtp_g722_pay_class_init (GstRtpG722PayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg722pay_debug, "rtpg722pay", 0,
+ "G722 RTP Payloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g722_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g722_pay_sink_template);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g722_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g722_pay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP audio payloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP audio payloader",
"Codec/Payloader/Network/RTP",
"Payload-encode Raw audio into RTP packets (RFC 3551)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_g722_pay_class_init (GstRtpG722PayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
-
- gstbasertppayload_class->set_caps = gst_rtp_g722_pay_setcaps;
- gstbasertppayload_class->get_caps = gst_rtp_g722_pay_getcaps;
-
- GST_DEBUG_CATEGORY_INIT (rtpg722pay_debug, "rtpg722pay", 0,
- "G722 RTP Payloader");
+ gstrtpbasepayload_class->set_caps = gst_rtp_g722_pay_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_g722_pay_getcaps;
}
static void
-gst_rtp_g722_pay_init (GstRtpG722Pay * rtpg722pay, GstRtpG722PayClass * klass)
+gst_rtp_g722_pay_init (GstRtpG722Pay * rtpg722pay)
{
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (rtpg722pay);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpg722pay);
- /* tell basertpaudiopayload that this is a sample based codec */
- gst_base_rtp_audio_payload_set_sample_based (basertpaudiopayload);
+ /* tell rtpbaseaudiopayload that this is a sample based codec */
+ gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
}
static gboolean
-gst_rtp_g722_pay_setcaps (GstBaseRTPPayload * basepayload, GstCaps * caps)
+gst_rtp_g722_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
{
GstRtpG722Pay *rtpg722pay;
GstStructure *structure;
gint rate, channels, clock_rate;
gboolean res;
gchar *params;
+#if 0
GstAudioChannelPosition *pos;
const GstRTPChannelOrder *order;
- GstBaseRTPAudioPayload *basertpaudiopayload;
+#endif
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (basepayload);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (basepayload);
rtpg722pay = GST_RTP_G722_PAY (basepayload);
structure = gst_caps_get_structure (caps, 0);
if (!gst_structure_get_int (structure, "channels", &channels))
goto no_channels;
+ /* FIXME: Do something with the channel positions */
+#if 0
/* get the channel order */
pos = gst_audio_get_channel_positions (structure);
if (pos)
order = gst_rtp_channels_get_by_pos (channels, pos);
else
order = NULL;
+#endif
/* Clock rate is always 8000 Hz for G722 according to
* RFC 3551 although the sampling rate is 16000 Hz */
clock_rate = 8000;
- gst_basertppayload_set_options (basepayload, "audio", TRUE, "G722",
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "G722",
clock_rate);
params = g_strdup_printf ("%d", channels);
+#if 0
if (!order && channels > 2) {
GST_ELEMENT_WARNING (rtpg722pay, STREAM, DECODE,
(NULL), ("Unknown channel order for %d channels", channels));
}
if (order && order->name) {
- res = gst_basertppayload_set_outcaps (basepayload,
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
"encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
channels, "channel-order", G_TYPE_STRING, order->name, NULL);
} else {
- res = gst_basertppayload_set_outcaps (basepayload,
+#endif
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
"encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
channels, NULL);
+#if 0
}
+#endif
g_free (params);
+#if 0
g_free (pos);
+#endif
rtpg722pay->rate = rate;
rtpg722pay->channels = channels;
/* bits-per-sample is 4 * channels for G722, but as the RTP clock runs at
* half speed (8 instead of 16 khz), pretend it's 8 bits per sample
* channels. */
- gst_base_rtp_audio_payload_set_samplebits_options (basertpaudiopayload,
+ gst_rtp_base_audio_payload_set_samplebits_options (rtpbaseaudiopayload,
8 * rtpg722pay->channels);
return res;
}
static GstCaps *
-gst_rtp_g722_pay_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
+gst_rtp_g722_pay_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
#define __GST_RTP_G722_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpaudiopayload.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
G_BEGIN_DECLS
struct _GstRtpG722Pay
{
- GstBaseRTPAudioPayload payload;
+ GstRTPBaseAudioPayload payload;
gint rate;
gint channels;
struct _GstRtpG722PayClass
{
- GstBaseRTPAudioPayloadClass parent_class;
+ GstRTPBaseAudioPayloadClass parent_class;
};
GType gst_rtp_g722_pay_get_type (void);
GST_STATIC_CAPS ("audio/G723, " "channels = (int) 1," "rate = (int) 8000")
);
-static gboolean gst_rtp_g723_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_g723_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_g723_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_g723_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpG723Depay, gst_rtp_g723_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_g723_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG723Depay, gst_rtp_g723_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
-gst_rtp_g723_depay_base_init (gpointer klass)
+gst_rtp_g723_depay_class_init (GstRtpG723DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g723_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g723_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP G.723 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts G.723 audio from RTP packets (RFC 3551)",
- "Wim Taymans <wim.taymans@gmail.com>");
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
GST_DEBUG_CATEGORY_INIT (rtpg723depay_debug, "rtpg723depay", 0,
"G.723 RTP Depayloader");
-}
-static void
-gst_rtp_g723_depay_class_init (GstRtpG723DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g723_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g723_depay_sink_template));
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP G.723 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts G.723 audio from RTP packets (RFC 3551)",
+ "Wim Taymans <wim.taymans@gmail.com>");
- gstbasertpdepayload_class->process = gst_rtp_g723_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_g723_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_g723_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_g723_depay_setcaps;
}
static void
-gst_rtp_g723_depay_init (GstRtpG723Depay * rtpg723depay,
- GstRtpG723DepayClass * klass)
+gst_rtp_g723_depay_init (GstRtpG723Depay * rtpg723depay)
{
- GstBaseRTPDepayload *depayload;
+ GstRTPBaseDepayload *depayload;
- depayload = GST_BASE_RTP_DEPAYLOAD (rtpg723depay);
+ depayload = GST_RTP_BASE_DEPAYLOAD (rtpg723depay);
- gst_pad_use_fixed_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload));
+ gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
}
static gboolean
-gst_rtp_g723_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_g723_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstCaps *srccaps;
srccaps = gst_caps_new_simple ("audio/G723",
"channels", G_TYPE_INT, channels, "rate", G_TYPE_INT, clock_rate, NULL);
- ret = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
return ret;
static GstBuffer *
-gst_rtp_g723_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_g723_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpG723Depay *rtpg723depay;
GstBuffer *outbuf = NULL;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
rtpg723depay = GST_RTP_G723_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* At least 4 bytes */
if (payload_len < 4)
GST_LOG_OBJECT (rtpg723depay, "payload len %d", payload_len);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (marker) {
/* marker bit starts talkspurt */
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
- GST_LOG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ GST_LOG_OBJECT (depayload, "pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
return outbuf;
bad_packet:
{
/* no fatal error */
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#define __GST_RTP_G723_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpG723Depay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
};
struct _GstRtpG723DepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_g723_depay_get_type (void);
#define G723_FRAME_DURATION (30 * GST_MSECOND)
-static gboolean gst_rtp_g723_pay_set_caps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_g723_pay_set_caps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_g723_pay_handle_buffer (GstBaseRTPPayload *
+static GstFlowReturn gst_rtp_g723_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buf);
static GstStaticPadTemplate gst_rtp_g723_pay_sink_template =
static GstStateChangeReturn gst_rtp_g723_pay_change_state (GstElement * element,
GstStateChange transition);
-GST_BOILERPLATE (GstRTPG723Pay, gst_rtp_g723_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_g723_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g723_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g723_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP G.723 payloader",
- "Codec/Payloader/Network/RTP",
- "Packetize G.723 audio into RTP packets",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_g723_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPG723Pay, gst_rtp_g723_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_g723_pay_class_init (GstRTPG723PayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPPayloadClass *payload_class;
+ GstRTPBasePayloadClass *payload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- payload_class = (GstBaseRTPPayloadClass *) klass;
+ payload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->finalize = gst_rtp_g723_pay_finalize;
gstelement_class->change_state = gst_rtp_g723_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g723_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g723_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP G.723 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Packetize G.723 audio into RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
payload_class->set_caps = gst_rtp_g723_pay_set_caps;
payload_class->handle_buffer = gst_rtp_g723_pay_handle_buffer;
}
static void
-gst_rtp_g723_pay_init (GstRTPG723Pay * pay, GstRTPG723PayClass * klass)
+gst_rtp_g723_pay_init (GstRTPG723Pay * pay)
{
- GstBaseRTPPayload *payload = GST_BASE_RTP_PAYLOAD (pay);
+ GstRTPBasePayload *payload = GST_RTP_BASE_PAYLOAD (pay);
pay->adapter = gst_adapter_new ();
payload->pt = GST_RTP_PAYLOAD_G723;
- gst_basertppayload_set_options (payload, "audio", FALSE, "G723", 8000);
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE, "G723", 8000);
}
static void
static gboolean
-gst_rtp_g723_pay_set_caps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_g723_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
GstStructure *structure;
payload->pt = pt;
payload->dynamic = pt != GST_RTP_PAYLOAD_G723;
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
return res;
}
GstFlowReturn ret;
guint8 *payload;
guint avail;
+ GstRTPBuffer rtp = { NULL };
avail = gst_adapter_available (pay->adapter);
outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);
- payload = gst_rtp_buffer_get_payload (outbuf);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = pay->timestamp;
GST_BUFFER_DURATION (outbuf) = pay->duration;
/* set discont and marker */
if (pay->discont) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
pay->discont = FALSE;
}
+ gst_rtp_buffer_unmap (&rtp);
- ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (pay), outbuf);
+ ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (pay), outbuf);
return ret;
}
};
static GstFlowReturn
-gst_rtp_g723_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buf)
+gst_rtp_g723_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buf)
{
GstFlowReturn ret = GST_FLOW_OK;
- guint8 *data;
- guint size;
+ GstMapInfo map;
guint8 HDR;
GstRTPG723Pay *pay;
GstClockTime packet_dur, timestamp;
pay = GST_RTP_G723_PAY (payload);
- size = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buf);
if (GST_BUFFER_IS_DISCONT (buf)) {
}
/* should be one of these sizes */
- if (size != 4 && size != 20 && size != 24)
+ if (map.size != 4 && map.size != 20 && map.size != 24)
goto invalid_size;
/* check size by looking at the header bits */
- HDR = data[0] & 0x3;
- if (size_tab[HDR] != size)
+ HDR = map.data[0] & 0x3;
+ if (size_tab[HDR] != map.size)
goto wrong_size;
/* calculate packet size and duration */
- payload_len = gst_adapter_available (pay->adapter) + size;
+ payload_len = gst_adapter_available (pay->adapter) + map.size;
packet_dur = pay->duration + G723_FRAME_DURATION;
packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);
- if (gst_basertppayload_is_filled (payload, packet_len, packet_dur)) {
+ if (gst_rtp_base_payload_is_filled (payload, packet_len, packet_dur)) {
/* size or duration would overflow the packet, flush the queued data */
ret = gst_rtp_g723_pay_flush (pay);
}
else
pay->timestamp = 0;
}
+ gst_buffer_unmap (buf, &map);
/* add packet to the queue */
gst_adapter_push (pay->adapter, buf);
{
GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE,
("Invalid input buffer size"),
- ("Input size should be 4, 20 or 24, got %u", size));
+ ("Input size should be 4, 20 or 24, got %" G_GSIZE_FORMAT, map.size));
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return GST_FLOW_OK;
}
{
GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE,
("Wrong input buffer size"),
- ("Expected input buffer size %u but got %u", size_tab[HDR], size));
+ ("Expected input buffer size %u but got %" G_GSIZE_FORMAT,
+ size_tab[HDR], map.size));
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return GST_FLOW_OK;
}
#define __GST_RTP_G723_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpaudiopayload.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
G_BEGIN_DECLS
struct _GstRTPG723Pay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstAdapter *adapter;
GstClockTime duration;
struct _GstRTPG723PayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_g723_pay_get_type (void);
static void gst_rtp_g726_depay_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
-static GstBuffer *gst_rtp_g726_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_g726_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_g726_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_g726_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpG726Depay, gst_rtp_g726_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
-
-static void
-gst_rtp_g726_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g726_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g726_depay_sink_template);
- gst_element_class_set_details_simple (element_class, "RTP G.726 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts G.726 audio from RTP packets",
- "Axis Communications <dev-gstreamer@axis.com>");
-}
+#define gst_rtp_g726_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG726Depay, gst_rtp_g726_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
gst_rtp_g726_depay_class_init (GstRtpG726DepayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg726depay_debug, "rtpg726depay", 0,
+ "G.726 RTP Depayloader");
gobject_class = (GObjectClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->set_property = gst_rtp_g726_depay_set_property;
gobject_class->get_property = gst_rtp_g726_depay_get_property;
"Force AAL2 decoding for compatibility with bad payloaders",
DEFAULT_FORCE_AAL2, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gstbasertpdepayload_class->process = gst_rtp_g726_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_g726_depay_setcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_depay_sink_template));
- GST_DEBUG_CATEGORY_INIT (rtpg726depay_debug, "rtpg726depay", 0,
- "G.726 RTP Depayloader");
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP G.726 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts G.726 audio from RTP packets",
+ "Axis Communications <dev-gstreamer@axis.com>");
+
+ gstrtpbasedepayload_class->process = gst_rtp_g726_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_g726_depay_setcaps;
}
static void
-gst_rtp_g726_depay_init (GstRtpG726Depay * rtpG726depay,
- GstRtpG726DepayClass * klass)
+gst_rtp_g726_depay_init (GstRtpG726Depay * rtpG726depay)
{
- GstBaseRTPDepayload *depayload;
+ GstRTPBaseDepayload *depayload;
- depayload = GST_BASE_RTP_DEPAYLOAD (rtpG726depay);
+ depayload = GST_RTP_BASE_DEPAYLOAD (rtpG726depay);
- gst_pad_use_fixed_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload));
+ gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
rtpG726depay->force_aal2 = DEFAULT_FORCE_AAL2;
}
static gboolean
-gst_rtp_g726_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_g726_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstCaps *srccaps;
GstStructure *structure;
"bitrate", G_TYPE_INT, depay->bitrate,
"layout", G_TYPE_STRING, LAYOUT_G726, NULL);
- ret = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
return ret;
static GstBuffer *
-gst_rtp_g726_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_g726_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpG726Depay *depay;
GstBuffer *outbuf = NULL;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
depay = GST_RTP_G726_DEPAY (depayload);
- marker = gst_rtp_buffer_get_marker (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtp);
- GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ marker = gst_rtp_buffer_get_marker (&rtp);
+
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
if (depay->aal2 || depay->force_aal2) {
/* AAL2, we can just copy the bytes */
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
if (!outbuf)
goto bad_len;
} else {
guint8 *in, *out, tmp;
guint len;
+ GstMapInfo map;
- in = gst_rtp_buffer_get_payload (buf);
- len = gst_rtp_buffer_get_payload_len (buf);
-
- if (gst_buffer_is_writable (buf)) {
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- } else {
- GstBuffer *copy;
-
- /* copy buffer */
- copy = gst_buffer_copy (buf);
- outbuf = gst_rtp_buffer_get_payload_buffer (copy);
- gst_buffer_unref (copy);
- }
+ in = gst_rtp_buffer_get_payload (&rtp);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
if (!outbuf)
goto bad_len;
+ outbuf = gst_buffer_make_writable (outbuf);
- out = GST_BUFFER_DATA (outbuf);
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ out = map.data;
/* we need to reshuffle the bytes, input is always of the form
* A B C D ... with the number of bits depending on the bitrate. */
break;
}
}
+ gst_buffer_unmap (outbuf, &map);
}
if (marker) {
#define __GST_RTP_G726_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpG726Depay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
gboolean aal2;
gboolean force_aal2;
struct _GstRtpG726DepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_g726_depay_get_type (void);
static void gst_rtp_g726_pay_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
-static gboolean gst_rtp_g726_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_g726_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_g726_pay_handle_buffer (GstBaseRTPPayload *
+static GstFlowReturn gst_rtp_g726_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpG726Pay, gst_rtp_g726_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
-
-static void
-gst_rtp_g726_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g726_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g726_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP G.726 payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes G.726 audio into a RTP packet",
- "Axis Communications <dev-gstreamer@axis.com>");
-}
+#define gst_rtp_g726_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG726Pay, gst_rtp_g726_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
static void
gst_rtp_g726_pay_class_init (GstRtpG726PayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->set_property = gst_rtp_g726_pay_set_property;
gobject_class->get_property = gst_rtp_g726_pay_get_property;
"Force AAL2 encoding for compatibility with bad depayloaders",
DEFAULT_FORCE_AAL2, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gstbasertppayload_class->set_caps = gst_rtp_g726_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_g726_pay_handle_buffer;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP G.726 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes G.726 audio into a RTP packet",
+ "Axis Communications <dev-gstreamer@axis.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_g726_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_g726_pay_handle_buffer;
GST_DEBUG_CATEGORY_INIT (rtpg726pay_debug, "rtpg726pay", 0,
"G.726 RTP Payloader");
}
static void
-gst_rtp_g726_pay_init (GstRtpG726Pay * rtpg726pay, GstRtpG726PayClass * klass)
+gst_rtp_g726_pay_init (GstRtpG726Pay * rtpg726pay)
{
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (rtpg726pay);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpg726pay);
- GST_BASE_RTP_PAYLOAD (rtpg726pay)->clock_rate = 8000;
+ GST_RTP_BASE_PAYLOAD (rtpg726pay)->clock_rate = 8000;
rtpg726pay->force_aal2 = DEFAULT_FORCE_AAL2;
/* sample based codec */
- gst_base_rtp_audio_payload_set_sample_based (basertpaudiopayload);
+ gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
}
static gboolean
-gst_rtp_g726_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_g726_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
gchar *encoding_name;
GstStructure *structure;
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
GstRtpG726Pay *pay;
GstCaps *peercaps;
gboolean res;
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (payload);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (payload);
pay = GST_RTP_G726_PAY (payload);
structure = gst_caps_get_structure (caps, 0);
switch (pay->bitrate) {
case 16000:
encoding_name = g_strdup ("G726-16");
- gst_base_rtp_audio_payload_set_samplebits_options (basertpaudiopayload,
+ gst_rtp_base_audio_payload_set_samplebits_options (rtpbaseaudiopayload,
2);
break;
case 24000:
encoding_name = g_strdup ("G726-24");
- gst_base_rtp_audio_payload_set_samplebits_options (basertpaudiopayload,
+ gst_rtp_base_audio_payload_set_samplebits_options (rtpbaseaudiopayload,
3);
break;
case 32000:
encoding_name = g_strdup ("G726-32");
- gst_base_rtp_audio_payload_set_samplebits_options (basertpaudiopayload,
+ gst_rtp_base_audio_payload_set_samplebits_options (rtpbaseaudiopayload,
4);
break;
case 40000:
encoding_name = g_strdup ("G726-40");
- gst_base_rtp_audio_payload_set_samplebits_options (basertpaudiopayload,
+ gst_rtp_base_audio_payload_set_samplebits_options (rtpbaseaudiopayload,
5);
break;
default:
GST_DEBUG_OBJECT (payload, "selected base encoding %s", encoding_name);
/* now see if we need to produce AAL2 or not */
- peercaps = gst_pad_peer_get_caps (payload->srcpad);
+ peercaps = gst_pad_peer_query_caps (payload->srcpad, NULL);
if (peercaps) {
GstCaps *filter, *intersect;
gchar *capsstr;
GST_DEBUG_OBJECT (payload, "no peer caps, AAL2 %d", pay->aal2);
}
- gst_basertppayload_set_options (payload, "audio", TRUE, encoding_name, 8000);
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ gst_rtp_base_payload_set_options (payload, "audio", TRUE, encoding_name,
+ 8000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
g_free (encoding_name);
}
static GstFlowReturn
-gst_rtp_g726_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buffer)
+gst_rtp_g726_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
GstFlowReturn res;
GstRtpG726Pay *pay;
pay = GST_RTP_G726_PAY (payload);
if (!pay->aal2) {
+ GstMapInfo map;
guint8 *data, tmp;
- guint len;
+ gsize size;
/* for non AAL2, we need to reshuffle the bytes, we can do this in-place
* when the buffer is writable. */
buffer = gst_buffer_make_writable (buffer);
- data = GST_BUFFER_DATA (buffer);
- len = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READWRITE);
+ data = map.data;
+ size = map.size;
- GST_LOG_OBJECT (pay, "packing %u bytes of data", len);
+ GST_LOG_OBJECT (pay, "packing %" G_GSIZE_FORMAT " bytes of data", map.size);
/* we need to reshuffle the bytes, output is of the form:
* A B C D .. with the number of bits depending on the bitrate. */
* |0 1|0 1|0 1|0 1|
* +-+-+-+-+-+-+-+-+-
*/
- while (len > 0) {
+ while (size > 0) {
tmp = *data;
*data++ = ((tmp & 0xc0) >> 6) |
((tmp & 0x30) >> 2) | ((tmp & 0x0c) << 2) | ((tmp & 0x03) << 6);
- len--;
+ size--;
}
break;
}
* |1 2|0 1 2|0 1 2|2|0 1 2|0 1 2|0|0 1 2|0 1 2|0 1|
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
*/
- while (len > 2) {
+ while (size > 2) {
tmp = *data;
*data++ = ((tmp & 0xc0) >> 6) |
((tmp & 0x38) >> 1) | ((tmp & 0x07) << 5);
tmp = *data;
*data++ = ((tmp & 0xe0) >> 5) |
((tmp & 0x1c) >> 2) | ((tmp & 0x03) << 6);
- len -= 3;
+ size -= 3;
}
break;
}
* |0 1 2 3|0 1 2 3|0 1 2 3|0 1 2 3|
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
*/
- while (len > 0) {
+ while (size > 0) {
tmp = *data;
*data++ = ((tmp & 0xf0) >> 4) | ((tmp & 0x0f) << 4);
- len--;
+ size--;
}
break;
}
* |2 3 4|0 1 2 3 4|4|0 1 2 3 4|0 1|1 2 3 4|0 1 2 3|3 4|0 1 2 3 4|0|0 1 2 3 4|0 1 2|
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
*/
- while (len > 4) {
+ while (size > 4) {
tmp = *data;
*data++ = ((tmp & 0xe0) >> 5) | ((tmp & 0x1f) << 3);
tmp = *data;
((tmp & 0x3e) << 2) | ((tmp & 0x01) << 7);
tmp = *data;
*data++ = ((tmp & 0xf8) >> 3) | ((tmp & 0x07) << 5);
- len -= 5;
+ size -= 5;
}
break;
}
}
+ gst_buffer_unmap (buffer, &map);
}
res =
- GST_BASE_RTP_PAYLOAD_CLASS (parent_class)->handle_buffer (payload,
+ GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->handle_buffer (payload,
buffer);
return res;
#define __GST_RTP_G726_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpaudiopayload.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
G_BEGIN_DECLS typedef struct _GstRtpG726Pay GstRtpG726Pay;
typedef struct _GstRtpG726PayClass GstRtpG726PayClass;
struct _GstRtpG726Pay
{
- GstBaseRTPAudioPayload audiopayload;
+ GstRTPBaseAudioPayload audiopayload;
gboolean aal2;
gboolean force_aal2;
struct _GstRtpG726PayClass
{
- GstBaseRTPAudioPayloadClass parent_class;
+ GstRTPBaseAudioPayloadClass parent_class;
};
GType gst_rtp_g726_pay_get_type (void);
GST_STATIC_CAPS ("audio/G729, " "channels = (int) 1," "rate = (int) 8000")
);
-static gboolean gst_rtp_g729_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_g729_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_g729_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_g729_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpG729Depay, gst_rtp_g729_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_g729_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG729Depay, gst_rtp_g729_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
-gst_rtp_g729_depay_base_init (gpointer klass)
+gst_rtp_g729_depay_class_init (GstRtpG729DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g729_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g729_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP G.729 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts G.729 audio from RTP packets (RFC 3551)",
- "Laurent Glayal <spglegle@yahoo.fr>");
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
GST_DEBUG_CATEGORY_INIT (rtpg729depay_debug, "rtpg729depay", 0,
"G.729 RTP Depayloader");
-}
-static void
-gst_rtp_g729_depay_class_init (GstRtpG729DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g729_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g729_depay_sink_template));
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP G.729 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts G.729 audio from RTP packets (RFC 3551)",
+ "Laurent Glayal <spglegle@yahoo.fr>");
- gstbasertpdepayload_class->process = gst_rtp_g729_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_g729_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_g729_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_g729_depay_setcaps;
}
static void
-gst_rtp_g729_depay_init (GstRtpG729Depay * rtpg729depay,
- GstRtpG729DepayClass * klass)
+gst_rtp_g729_depay_init (GstRtpG729Depay * rtpg729depay)
{
- GstBaseRTPDepayload *depayload;
+ GstRTPBaseDepayload *depayload;
- depayload = GST_BASE_RTP_DEPAYLOAD (rtpg729depay);
+ depayload = GST_RTP_BASE_DEPAYLOAD (rtpg729depay);
- gst_pad_use_fixed_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload));
+ gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
}
static gboolean
-gst_rtp_g729_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_g729_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstCaps *srccaps;
srccaps = gst_caps_new_simple ("audio/G729",
"channels", G_TYPE_INT, channels, "rate", G_TYPE_INT, clock_rate, NULL);
- ret = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
return ret;
}
}
-
static GstBuffer *
-gst_rtp_g729_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_g729_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpG729Depay *rtpg729depay;
GstBuffer *outbuf = NULL;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
rtpg729depay = GST_RTP_G729_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* At least 2 bytes (CNG from G729 Annex B) */
if (payload_len < 2) {
GST_LOG_OBJECT (rtpg729depay, "G729 payload contains CNG frame");
}
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+
+ gst_rtp_buffer_unmap (&rtp);
if (marker) {
/* marker bit starts talkspurt */
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
- GST_LOG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ GST_LOG_OBJECT (depayload, "pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
return outbuf;
#define __GST_RTP_G729_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpG729Depay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
};
struct _GstRtpG729DepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_g729_depay_get_type (void);
#define G729_FRAME_DURATION_MS (10)
static gboolean
-gst_rtp_g729_pay_set_caps (GstBaseRTPPayload * payload, GstCaps * caps);
+gst_rtp_g729_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps);
static GstFlowReturn
-gst_rtp_g729_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buf);
+gst_rtp_g729_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buf);
static GstStateChangeReturn
gst_rtp_g729_pay_change_state (GstElement * element, GstStateChange transition);
"clock-rate = (int) 8000, " "encoding-name = (string) \"G729\"")
);
-GST_BOILERPLATE (GstRTPG729Pay, gst_rtp_g729_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_g729_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g729_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_g729_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP G.729 payloader",
- "Codec/Payloader/Network/RTP",
- "Packetize G.729 audio into RTP packets",
- "Olivier Crete <olivier.crete@collabora.co.uk>");
-
- GST_DEBUG_CATEGORY_INIT (rtpg729pay_debug, "rtpg729pay", 0,
- "G.729 RTP Payloader");
-}
+#define gst_rtp_g729_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPG729Pay, gst_rtp_g729_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_g729_pay_finalize (GObject * object)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass;
- GstBaseRTPPayloadClass *payload_class = GST_BASE_RTP_PAYLOAD_CLASS (klass);
+ GstRTPBasePayloadClass *payload_class = GST_RTP_BASE_PAYLOAD_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (rtpg729pay_debug, "rtpg729pay", 0,
+ "G.729 RTP Payloader");
gobject_class->finalize = gst_rtp_g729_pay_finalize;
gstelement_class->change_state = gst_rtp_g729_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g729_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g729_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP G.729 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Packetize G.729 audio into RTP packets",
+ "Olivier Crete <olivier.crete@collabora.co.uk>");
+
payload_class->set_caps = gst_rtp_g729_pay_set_caps;
payload_class->handle_buffer = gst_rtp_g729_pay_handle_buffer;
}
static void
-gst_rtp_g729_pay_init (GstRTPG729Pay * pay, GstRTPG729PayClass * klass)
+gst_rtp_g729_pay_init (GstRTPG729Pay * pay)
{
- GstBaseRTPPayload *payload = GST_BASE_RTP_PAYLOAD (pay);
+ GstRTPBasePayload *payload = GST_RTP_BASE_PAYLOAD (pay);
payload->pt = GST_RTP_PAYLOAD_G729;
- gst_basertppayload_set_options (payload, "audio", FALSE, "G729", 8000);
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE, "G729", 8000);
pay->adapter = gst_adapter_new ();
}
}
static gboolean
-gst_rtp_g729_pay_set_caps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_g729_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
GstStructure *structure;
payload->pt = pt;
payload->dynamic = pt != GST_RTP_PAYLOAD_G729;
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
return res;
}
gst_rtp_g729_pay_push (GstRTPG729Pay * rtpg729pay,
const guint8 * data, guint payload_len)
{
- GstBaseRTPPayload *basepayload;
+ GstRTPBasePayload *basepayload;
GstClockTime duration;
guint frames;
GstBuffer *outbuf;
guint8 *payload;
GstFlowReturn ret;
+ GstRTPBuffer rtp = { NULL };
- basepayload = GST_BASE_RTP_PAYLOAD (rtpg729pay);
+ basepayload = GST_RTP_BASE_PAYLOAD (rtpg729pay);
GST_DEBUG_OBJECT (rtpg729pay, "Pushing %d bytes ts %" GST_TIME_FORMAT,
payload_len, GST_TIME_ARGS (rtpg729pay->next_ts));
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_READWRITE, &rtp);
+
/* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
memcpy (payload, data, payload_len);
/* set metadata */
if (G_UNLIKELY (rtpg729pay->discont)) {
GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
rtpg729pay->discont = FALSE;
}
+ gst_rtp_buffer_unmap (&rtp);
- ret = gst_basertppayload_push (basepayload, outbuf);
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
return ret;
}
}
static GstFlowReturn
-gst_rtp_g729_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buf)
+gst_rtp_g729_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buf)
{
GstFlowReturn ret = GST_FLOW_OK;
GstRTPG729Pay *rtpg729pay = GST_RTP_G729_PAY (payload);
guint minptime_octets = 0;
guint min_payload_len;
guint max_payload_len;
+ gsize size;
+ GstClockTime timestamp;
- available = GST_BUFFER_SIZE (buf);
+ size = gst_buffer_get_size (buf);
- if (available % G729_FRAME_SIZE != 0 &&
- available % G729_FRAME_SIZE != G729B_CN_FRAME_SIZE)
+ if (size % G729_FRAME_SIZE != 0 &&
+ size % G729_FRAME_SIZE != G729B_CN_FRAME_SIZE)
goto invalid_size;
/* max number of bytes based on given ptime, has to be multiple of
max_payload_len = MIN (
/* MTU max */
- (int) (gst_rtp_buffer_calc_payload_len (GST_BASE_RTP_PAYLOAD_MTU
+ (int) (gst_rtp_buffer_calc_payload_len (GST_RTP_BASE_PAYLOAD_MTU
(payload), 0, 0) / G729_FRAME_SIZE)
* G729_FRAME_SIZE,
/* ptime max */
}
/* If the ptime is specified in the caps, tried to adhere to it exactly */
- if (payload->abidata.ABI.ptime) {
- guint64 ptime = payload->abidata.ABI.ptime / GST_MSECOND;
+ if (payload->ptime) {
+ guint64 ptime = payload->ptime / GST_MSECOND;
guint ptime_in_bytes = G729_FRAME_SIZE *
(guint) (ptime / G729_FRAME_DURATION_MS);
adapter = rtpg729pay->adapter;
available = gst_adapter_available (adapter);
+ timestamp = GST_BUFFER_TIMESTAMP (buf);
+
/* resync rtp time on discont or a discontinuous cn packet */
if (GST_BUFFER_IS_DISCONT (buf)) {
/* flush remainder */
available = 0;
}
rtpg729pay->discont = TRUE;
- gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, GST_BUFFER_TIMESTAMP (buf));
+ gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, timestamp);
}
- if (GST_BUFFER_SIZE (buf) < G729_FRAME_SIZE)
- gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, GST_BUFFER_TIMESTAMP (buf));
+ if (size < G729_FRAME_SIZE)
+ gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, timestamp);
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (rtpg729pay->first_ts))) {
- rtpg729pay->first_ts = GST_BUFFER_TIMESTAMP (buf);
+ rtpg729pay->first_ts = timestamp;
rtpg729pay->first_rtp_time = rtpg729pay->next_rtp_time;
}
/* let's reset the base timestamp when the adapter is empty */
if (available == 0)
- rtpg729pay->next_ts = GST_BUFFER_TIMESTAMP (buf);
+ rtpg729pay->next_ts = timestamp;
- if (available == 0 &&
- GST_BUFFER_SIZE (buf) >= min_payload_len &&
- GST_BUFFER_SIZE (buf) <= max_payload_len) {
- ret = gst_rtp_g729_pay_push (rtpg729pay,
- GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ if (available == 0 && size >= min_payload_len && size <= max_payload_len) {
+ GstMapInfo map;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ ret = gst_rtp_g729_pay_push (rtpg729pay, map.data, map.size);
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
return ret;
}
("Invalid input buffer size"),
("Invalid buffer size, should be a multiple of"
" G729_FRAME_SIZE(10) with an optional G729B_CN_FRAME_SIZE(2)"
- " added to it, but it is %u", available));
+ " added to it, but it is %" G_GSIZE_FORMAT, size));
gst_buffer_unref (buf);
return GST_FLOW_ERROR;
}
#define __GST_RTP_G729_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpaudiopayload.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
G_BEGIN_DECLS
struct _GstRTPG729Pay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstAdapter *adapter;
GstClockTime next_ts;
struct _GstRTPG729PayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_g729_pay_get_type (void);
"clock-rate = (int) 8000")
);
-static GstBuffer *gst_rtp_gsm_depay_process (GstBaseRTPDepayload * _depayload,
+static GstBuffer *gst_rtp_gsm_depay_process (GstRTPBaseDepayload * _depayload,
GstBuffer * buf);
-static gboolean gst_rtp_gsm_depay_setcaps (GstBaseRTPDepayload * _depayload,
+static gboolean gst_rtp_gsm_depay_setcaps (GstRTPBaseDepayload * _depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPGSMDepay, gst_rtp_gsm_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_gsm_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPGSMDepay, gst_rtp_gsm_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
-gst_rtp_gsm_depay_base_init (gpointer klass)
+gst_rtp_gsm_depay_class_init (GstRTPGSMDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbase_depayload_class;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_gsm_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_gsm_depay_sink_template);
- gst_element_class_set_details_simple (element_class, "RTP GSM depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts GSM audio from RTP packets", "Zeeshan Ali <zeenix@gmail.com>");
-}
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbase_depayload_class = (GstRTPBaseDepayloadClass *) klass;
-static void
-gst_rtp_gsm_depay_class_init (GstRTPGSMDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertp_depayload_class;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gsm_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gsm_depay_sink_template));
- gstbasertp_depayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gst_element_class_set_details_simple (gstelement_class, "RTP GSM depayloader",
+ "Codec/Depayloader/Network/RTP",
+ "Extracts GSM audio from RTP packets", "Zeeshan Ali <zeenix@gmail.com>");
- gstbasertp_depayload_class->process = gst_rtp_gsm_depay_process;
- gstbasertp_depayload_class->set_caps = gst_rtp_gsm_depay_setcaps;
+ gstrtpbase_depayload_class->process = gst_rtp_gsm_depay_process;
+ gstrtpbase_depayload_class->set_caps = gst_rtp_gsm_depay_setcaps;
GST_DEBUG_CATEGORY_INIT (rtpgsmdepay_debug, "rtpgsmdepay", 0,
"GSM Audio RTP Depayloader");
}
static void
-gst_rtp_gsm_depay_init (GstRTPGSMDepay * rtpgsmdepay,
- GstRTPGSMDepayClass * klass)
+gst_rtp_gsm_depay_init (GstRTPGSMDepay * rtpgsmdepay)
{
- /* needed because of GST_BOILERPLATE */
}
static gboolean
-gst_rtp_gsm_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_gsm_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstCaps *srccaps;
gboolean ret;
srccaps = gst_caps_new_simple ("audio/x-gsm",
"channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, clock_rate, NULL);
- ret = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
return ret;
}
static GstBuffer *
-gst_rtp_gsm_depay_process (GstBaseRTPDepayload * _depayload, GstBuffer * buf)
+gst_rtp_gsm_depay_process (GstRTPBaseDepayload * _depayload, GstBuffer * buf)
{
GstBuffer *outbuf = NULL;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ marker = gst_rtp_buffer_get_marker (&rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark start of talkspurt with DISCONT */
#define __GST_RTP_GSM_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRTPGSMDepay
{
- GstBaseRTPDepayload _depayload;
+ GstRTPBaseDepayload _depayload;
};
struct _GstRTPGSMDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_gsm_depay_get_type (void);
"clock-rate = (int) 8000, " "encoding-name = (string) \"GSM\"")
);
-static gboolean gst_rtp_gsm_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_gsm_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_gsm_pay_handle_buffer (GstBaseRTPPayload * payload,
+static GstFlowReturn gst_rtp_gsm_pay_handle_buffer (GstRTPBasePayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRTPGSMPay, gst_rtp_gsm_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
+#define gst_rtp_gsm_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPGSMPay, gst_rtp_gsm_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
-gst_rtp_gsm_pay_base_init (gpointer klass)
+gst_rtp_gsm_pay_class_init (GstRTPGSMPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_gsm_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_gsm_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP GSM payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes GSM audio into a RTP packet",
- "Zeeshan Ali <zeenix@gmail.com>");
-}
+ GST_DEBUG_CATEGORY_INIT (rtpgsmpay_debug, "rtpgsmpay", 0,
+ "GSM Audio RTP Payloader");
-static void
-gst_rtp_gsm_pay_class_init (GstRTPGSMPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gsm_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gsm_pay_src_template));
- gstbasertppayload_class->set_caps = gst_rtp_gsm_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_gsm_pay_handle_buffer;
+ gst_element_class_set_details_simple (gstelement_class, "RTP GSM payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes GSM audio into a RTP packet",
+ "Zeeshan Ali <zeenix@gmail.com>");
- GST_DEBUG_CATEGORY_INIT (rtpgsmpay_debug, "rtpgsmpay", 0,
- "GSM Audio RTP Payloader");
+ gstrtpbasepayload_class->set_caps = gst_rtp_gsm_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_gsm_pay_handle_buffer;
}
static void
-gst_rtp_gsm_pay_init (GstRTPGSMPay * rtpgsmpay, GstRTPGSMPayClass * klass)
+gst_rtp_gsm_pay_init (GstRTPGSMPay * rtpgsmpay)
{
- GST_BASE_RTP_PAYLOAD (rtpgsmpay)->clock_rate = 8000;
- GST_BASE_RTP_PAYLOAD_PT (rtpgsmpay) = GST_RTP_PAYLOAD_GSM;
+ GST_RTP_BASE_PAYLOAD (rtpgsmpay)->clock_rate = 8000;
+ GST_RTP_BASE_PAYLOAD_PT (rtpgsmpay) = GST_RTP_PAYLOAD_GSM;
}
static gboolean
-gst_rtp_gsm_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_gsm_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
const char *stname;
GstStructure *structure;
if (strcmp ("audio/x-gsm", stname))
goto invalid_type;
- gst_basertppayload_set_options (payload, "audio", FALSE, "GSM", 8000);
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE, "GSM", 8000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
return res;
}
static GstFlowReturn
-gst_rtp_gsm_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_gsm_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRTPGSMPay *rtpgsmpay;
- guint size, payload_len;
+ guint payload_len;
GstBuffer *outbuf;
- guint8 *payload, *data;
+ GstMapInfo map;
+ guint8 *payload;
GstClockTime timestamp, duration;
GstFlowReturn ret;
+ GstRTPBuffer rtp = { NULL };
rtpgsmpay = GST_RTP_GSM_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
/* FIXME, only one GSM frame per RTP packet for now */
- payload_len = size;
+ payload_len = map.size;
/* FIXME, just error out for now */
- if (payload_len > GST_BASE_RTP_PAYLOAD_MTU (rtpgsmpay)) {
- GST_ELEMENT_ERROR (rtpgsmpay, STREAM, ENCODE, (NULL),
- ("payload_len %u > mtu %u", payload_len,
- GST_BASE_RTP_PAYLOAD_MTU (rtpgsmpay)));
- return GST_FLOW_ERROR;
- }
+ if (payload_len > GST_RTP_BASE_PAYLOAD_MTU (rtpgsmpay))
+ goto too_big;
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
GST_BUFFER_DURATION (outbuf) = duration;
/* get payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
-
- data = GST_BUFFER_DATA (buffer);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* copy data in payload */
- memcpy (&payload[0], data, size);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ memcpy (payload, map.data, map.size);
+
+ gst_rtp_buffer_unmap (&rtp);
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
- GST_DEBUG ("gst_rtp_gsm_pay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ GST_DEBUG ("gst_rtp_gsm_pay_chain: pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
- ret = gst_basertppayload_push (basepayload, outbuf);
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
return ret;
+
+ /* ERRORS */
+too_big:
+ {
+ GST_ELEMENT_ERROR (rtpgsmpay, STREAM, ENCODE, (NULL),
+ ("payload_len %u > mtu %u", payload_len,
+ GST_RTP_BASE_PAYLOAD_MTU (rtpgsmpay)));
+ gst_buffer_unmap (buffer, &map);
+ return GST_FLOW_ERROR;
+ }
}
gboolean
#define __GST_RTP_GSM_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
G_BEGIN_DECLS
struct _GstRTPGSMPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
};
struct _GstRTPGSMPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_gsm_pay_get_type (void);
"clock-rate = (int) 90000, " "encoding-name = (string) \"X-GST\"")
);
-GST_BOILERPLATE (GstRtpGSTDepay, gst_rtp_gst_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_gst_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpGSTDepay, gst_rtp_gst_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_gst_depay_finalize (GObject * object);
element, GstStateChange transition);
static void gst_rtp_gst_depay_reset (GstRtpGSTDepay * rtpgstdepay);
-static gboolean gst_rtp_gst_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_gst_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_gst_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_gst_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_gst_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_gst_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_gst_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "GStreamer depayloader", "Codec/Depayloader/Network",
- "Extracts GStreamer buffers from RTP packets",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_gst_depay_class_init (GstRtpGSTDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpgstdepay_debug, "rtpgstdepay", 0,
+ "Gstreamer RTP Depayloader");
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_gst_depay_finalize;
gstelement_class->change_state = gst_rtp_gst_depay_change_state;
- gstbasertpdepayload_class->set_caps = gst_rtp_gst_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_gst_depay_process;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gst_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gst_depay_sink_template));
- GST_DEBUG_CATEGORY_INIT (rtpgstdepay_debug, "rtpgstdepay", 0,
- "Gstreamer RTP Depayloader");
+ gst_element_class_set_details_simple (gstelement_class,
+ "GStreamer depayloader", "Codec/Depayloader/Network",
+ "Extracts GStreamer buffers from RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasedepayload_class->set_caps = gst_rtp_gst_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_gst_depay_process;
}
static void
-gst_rtp_gst_depay_init (GstRtpGSTDepay * rtpgstdepay,
- GstRtpGSTDepayClass * klass)
+gst_rtp_gst_depay_init (GstRtpGSTDepay * rtpgstdepay)
{
rtpgstdepay->adapter = gst_adapter_new ();
}
}
static gboolean
-gst_rtp_gst_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_gst_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstRtpGSTDepay *rtpgstdepay;
GstStructure *structure;
}
static GstBuffer *
-gst_rtp_gst_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_gst_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpGSTDepay *rtpgstdepay;
GstBuffer *subbuf, *outbuf = NULL;
gint payload_len;
guint8 *payload;
guint CV;
+ GstRTPBuffer rtp = { NULL };
rtpgstdepay = GST_RTP_GST_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 8)
goto empty_packet;
gst_adapter_clear (rtpgstdepay->adapter);
}
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* strip off header
*
* 0 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- * |C| CV |D|X|Y|Z| MBZ |
+ * |C| CV |D|0|0|0| MBZ |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Frag_offset |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
/* subbuffer skipping the 8 header bytes */
- subbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 8, -1);
+ subbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 8, -1);
gst_adapter_push (rtpgstdepay->adapter, subbuf);
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
GstCaps *outcaps;
CV = (payload[0] >> 4) & 0x7;
if (payload[0] & 0x80) {
- guint b, csize, size, offset;
- guint8 *data;
+ guint b, csize, left, offset;
+ GstMapInfo map;
GstBuffer *subbuf;
/* C bit, we have inline caps */
- data = GST_BUFFER_DATA (outbuf);
- size = GST_BUFFER_SIZE (outbuf);
+ gst_buffer_map (outbuf, &map, GST_MAP_READ);
/* start reading the length, we need this to skip to the data later */
csize = offset = 0;
+ left = map.size;
do {
- if (offset >= size)
+ if (offset >= left) {
+ gst_buffer_unmap (outbuf, &map);
goto too_small;
- b = data[offset++];
+ }
+ b = map.data[offset++];
csize = (csize << 7) | (b & 0x7f);
} while (b & 0x80);
- if (size < csize)
+ if (left < csize) {
+ gst_buffer_unmap (outbuf, &map);
goto too_small;
+ }
/* parse and store in cache */
- outcaps = gst_caps_from_string ((gchar *) & data[offset]);
+ outcaps = gst_caps_from_string ((gchar *) & map.data[offset]);
store_cache (rtpgstdepay, CV, outcaps);
/* skip caps */
offset += csize;
- size -= csize;
+ left -= csize;
GST_DEBUG_OBJECT (rtpgstdepay,
"inline caps %u, length %u, %" GST_PTR_FORMAT, CV, csize, outcaps);
/* create real data buffer when needed */
- if (size)
- subbuf = gst_buffer_create_sub (outbuf, offset, size);
+ if (map.size)
+ subbuf =
+ gst_buffer_copy_region (outbuf, GST_BUFFER_COPY_ALL, offset, left);
else
subbuf = NULL;
+ gst_buffer_unmap (outbuf, &map);
gst_buffer_unref (outbuf);
outbuf = subbuf;
}
if (outbuf) {
if (payload[0] & 0x8)
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
- if (payload[0] & 0x4)
- GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MEDIA1);
- if (payload[0] & 0x2)
- GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MEDIA2);
- if (payload[0] & 0x1)
- GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MEDIA3);
}
}
return outbuf;
{
GST_ELEMENT_WARNING (rtpgstdepay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
too_small:
("Buffer too small."), (NULL));
if (outbuf)
gst_buffer_unref (outbuf);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
missing_caps:
("Missing caps %u.", CV), (NULL));
if (outbuf)
gst_buffer_unref (outbuf);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpGSTDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstAdapter *adapter;
guint current_CV;
struct _GstRtpGSTDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_gst_depay_get_type (void);
* 0 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- * |C| CV |D|X|Y|Z| MBZ |
+ * |C| CV |D|0|0|0| MBZ |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Frag_offset |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*
* CV: caps version, 0 = caps from SDP, 1 - 7 inlined caps
* D: delta unit buffer
- * X: media 1 flag
- * Y: media 2 flag
- * Z: media 3 flag
*
*
*/
"clock-rate = (int) 90000, " "encoding-name = (string) \"X-GST\"")
);
-static gboolean gst_rtp_gst_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_gst_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_gst_pay_handle_buffer (GstBaseRTPPayload * payload,
+static GstFlowReturn gst_rtp_gst_pay_handle_buffer (GstRTPBasePayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpGSTPay, gst_rtp_gst_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
+#define gst_rtp_gst_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpGSTPay, gst_rtp_gst_pay, GST_TYPE_RTP_BASE_PAYLOAD);
- static void gst_rtp_gst_pay_base_init (gpointer klass)
+static void
+gst_rtp_gst_pay_class_init (GstRtpGSTPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_gst_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_gst_pay_sink_template);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gst_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gst_pay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP GStreamer payloader", "Codec/Payloader/Network/RTP",
"Payload GStreamer buffers as RTP packets",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-static void
-gst_rtp_gst_pay_class_init (GstRtpGSTPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
-
- gstbasertppayload_class->set_caps = gst_rtp_gst_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_gst_pay_handle_buffer;
+ gstrtpbasepayload_class->set_caps = gst_rtp_gst_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_gst_pay_handle_buffer;
}
static void
-gst_rtp_gst_pay_init (GstRtpGSTPay * rtpgstpay, GstRtpGSTPayClass * klass)
+gst_rtp_gst_pay_init (GstRtpGSTPay * rtpgstpay)
{
}
static gboolean
-gst_rtp_gst_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_gst_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
gchar *capsstr, *capsenc;
capsenc = g_base64_encode ((guchar *) capsstr, strlen (capsstr));
g_free (capsstr);
- gst_basertppayload_set_options (payload, "application", TRUE, "X-GST", 90000);
+ gst_rtp_base_payload_set_options (payload, "application", TRUE, "X-GST",
+ 90000);
res =
- gst_basertppayload_set_outcaps (payload, "caps", G_TYPE_STRING, capsenc,
+ gst_rtp_base_payload_set_outcaps (payload, "caps", G_TYPE_STRING, capsenc,
NULL);
g_free (capsenc);
}
static GstFlowReturn
-gst_rtp_gst_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_gst_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpGSTPay *rtpgstpay;
- guint8 *data;
- guint size;
+ GstMapInfo map;
+ guint8 *ptr;
+ gsize left;
GstBuffer *outbuf;
GstFlowReturn ret;
GstClockTime timestamp;
rtpgstpay = GST_RTP_GST_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
ret = GST_FLOW_OK;
flags = 0;
if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT))
flags |= (1 << 3);
- if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MEDIA1))
- flags |= (1 << 2);
- if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MEDIA2))
- flags |= (1 << 1);
- if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MEDIA3))
- flags |= (1 << 0);
/*
* 0 1 2 3
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
frag_offset = 0;
+ ptr = map.data;
+ left = map.size;
- while (size > 0) {
+ while (left > 0) {
guint towrite;
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL };
/* this will be the total lenght of the packet */
- packet_len = gst_rtp_buffer_calc_packet_len (8 + size, 0, 0);
+ packet_len = gst_rtp_buffer_calc_packet_len (8 + left, 0, 0);
/* fill one MTU or all available bytes */
- towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpgstpay));
+ towrite = MIN (packet_len, GST_RTP_BASE_PAYLOAD_MTU (rtpgstpay));
/* this is the payload length */
payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
- payload = gst_rtp_buffer_get_payload (outbuf);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload[0] = flags;
payload[1] = payload[2] = payload[3] = 0;
payload += 8;
payload_len -= 8;
- memcpy (payload, data, payload_len);
+ memcpy (payload, ptr, payload_len);
- data += payload_len;
- size -= payload_len;
+ ptr += payload_len;
+ left -= payload_len;
frag_offset += payload_len;
- if (size == 0)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ if (left == 0)
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
- ret = gst_basertppayload_push (basepayload, outbuf);
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
}
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return ret;
#define __GST_RTP_GST_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
G_BEGIN_DECLS
struct _GstRtpGSTPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
};
struct _GstRtpGSTPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_gst_pay_get_type (void);
"clock-rate = (int) 90000, " "encoding-name = (string) \"H263\"")
);
-GST_BOILERPLATE (GstRtpH263Depay, gst_rtp_h263_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_h263_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263Depay, gst_rtp_h263_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_h263_depay_finalize (GObject * object);
static GstStateChangeReturn gst_rtp_h263_depay_change_state (GstElement *
element, GstStateChange transition);
-static GstBuffer *gst_rtp_h263_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_h263_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-gboolean gst_rtp_h263_depay_setcaps (GstBaseRTPDepayload * filter,
+gboolean gst_rtp_h263_depay_setcaps (GstRTPBaseDepayload * filter,
GstCaps * caps);
static void
-gst_rtp_h263_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h263_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h263_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP H263 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts H263 video from RTP packets (RFC 2190)",
- "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
- "Edward Hervey <bilboed@bilboed.com>");
-}
-
-static void
gst_rtp_h263_depay_class_init (GstRtpH263DepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtph263depay_debug, "rtph263depay", 0,
+ "H263 Video RTP Depayloader");
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
- gstbasertpdepayload_class->process = gst_rtp_h263_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_h263_depay_setcaps;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_h263_depay_finalize;
gstelement_class->change_state = gst_rtp_h263_depay_change_state;
- GST_DEBUG_CATEGORY_INIT (rtph263depay_debug, "rtph263depay", 0,
- "H263 Video RTP Depayloader");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP H263 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H263 video from RTP packets (RFC 2190)",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
+ "Edward Hervey <bilboed@bilboed.com>");
+
+ gstrtpbasedepayload_class->process = gst_rtp_h263_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_h263_depay_setcaps;
}
static void
-gst_rtp_h263_depay_init (GstRtpH263Depay * rtph263depay,
- GstRtpH263DepayClass * klass)
+gst_rtp_h263_depay_init (GstRtpH263Depay * rtph263depay)
{
rtph263depay->adapter = gst_adapter_new ();
}
gboolean
-gst_rtp_h263_depay_setcaps (GstBaseRTPDepayload * filter, GstCaps * caps)
+gst_rtp_h263_depay_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps)
{
GstCaps *srccaps;
GstStructure *structure = gst_caps_get_structure (caps, 0);
srccaps = gst_caps_new_simple ("video/x-h263",
"variant", G_TYPE_STRING, "itu",
"h263version", G_TYPE_STRING, "h263", NULL);
- gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (filter), srccaps);
+ gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (filter), srccaps);
gst_caps_unref (srccaps);
return TRUE;
}
static GstBuffer *
-gst_rtp_h263_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_h263_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpH263Depay *rtph263depay;
GstBuffer *outbuf;
guint SBIT, EBIT;
gboolean F, P, M;
gboolean I;
+ GstRTPBuffer rtp = { NULL };
rtph263depay = GST_RTP_H263_DEPAY (depayload);
rtph263depay->start = FALSE;
}
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
- M = gst_rtp_buffer_get_marker (buf);
+ M = gst_rtp_buffer_get_marker (&rtp);
/* Let's see what mode we are using */
F = (payload[0] & 0x80) == 0x80;
if (!F && payload_len > 4 && (GST_READ_UINT32_BE (payload) >> 10 == 0x20)) {
GST_DEBUG ("Mode A with PSC => frame start");
rtph263depay->start = TRUE;
- if (!!(payload[4] & 0x02) != I) {
+ if (! !(payload[4] & 0x02) != I) {
GST_DEBUG ("Wrong Picture Coding Type Flag in rtp header");
I = !I;
}
GstBuffer *tmp;
/* Take the entire buffer */
- tmp = gst_rtp_buffer_get_payload_subbuffer (buf, header_len, payload_len);
+ tmp = gst_rtp_buffer_get_payload_subbuffer (&rtp, header_len, payload_len);
gst_adapter_push (rtph263depay->adapter, tmp);
} else {
GstBuffer *tmp;
/* Take the entire buffer except for the last byte */
- tmp = gst_rtp_buffer_get_payload_subbuffer (buf, header_len,
+ tmp = gst_rtp_buffer_get_payload_subbuffer (&rtp, header_len,
payload_len - 1);
gst_adapter_push (rtph263depay->adapter, tmp);
if (rtph263depay->start) {
/* frame is completed */
guint avail;
- guint32 timestamp;
if (rtph263depay->offset) {
/* push in the leftover */
GstBuffer *buf = gst_buffer_new_and_alloc (1);
GST_DEBUG ("Pushing leftover in adapter");
- GST_BUFFER_DATA (buf)[0] = rtph263depay->leftover;
+ gst_buffer_fill (buf, 0, &rtph263depay->leftover, 1);
gst_adapter_push (rtph263depay->adapter, buf);
}
GST_DEBUG ("Pushing out a buffer of %d bytes", avail);
- timestamp = gst_rtp_buffer_get_timestamp (buf);
- gst_base_rtp_depayload_push_ts (depayload, timestamp, outbuf);
+ gst_rtp_base_depayload_push (depayload, outbuf);
rtph263depay->offset = 0;
rtph263depay->leftover = 0;
rtph263depay->start = FALSE;
rtph263depay->start = TRUE;
}
}
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpH263Depay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
guint8 offset; /* offset to apply to next payload */
guint8 leftover; /* leftover from previous payload (if offset != 0) */
struct _GstRtpH263DepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_h263_depay_get_type (void);
static void gst_rtp_h263_pay_finalize (GObject * object);
-static gboolean gst_rtp_h263_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_h263_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
static void gst_rtp_h263_pay_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_rtp_h263_pay_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstFlowReturn gst_rtp_h263_pay_handle_buffer (GstBaseRTPPayload *
+static GstFlowReturn gst_rtp_h263_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
static void gst_rtp_h263_pay_boundry_init (GstRtpH263PayBoundry * boundry,
guint ind);
static void gst_rtp_h263_pay_package_destroy (GstRtpH263PayPackage * pack);
-GST_BOILERPLATE (GstRtpH263Pay, gst_rtp_h263_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_h263_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h263_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h263_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP H263 packet payloader", "Codec/Payloader/Network/RTP",
- "Payload-encodes H263 video in RTP packets (RFC 2190)",
- "Neil Stratford <neils@vipadia.com>"
- "Dejan Sakelsak <dejan.sakelsak@marand.si>");
-}
+#define gst_rtp_h263_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263Pay, gst_rtp_h263_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_h263_pay_class_init (GstRtpH263PayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->finalize = gst_rtp_h263_pay_finalize;
- gstbasertppayload_class->set_caps = gst_rtp_h263_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_h263_pay_handle_buffer;
+ gstrtpbasepayload_class->set_caps = gst_rtp_h263_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_h263_pay_handle_buffer;
gobject_class->set_property = gst_rtp_h263_pay_set_property;
gobject_class->get_property = gst_rtp_h263_pay_get_property;
"Disable packetization modes B and C", DEFAULT_MODE_A,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP H263 packet payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes H263 video in RTP packets (RFC 2190)",
+ "Neil Stratford <neils@vipadia.com>"
+ "Dejan Sakelsak <dejan.sakelsak@marand.si>");
+
GST_DEBUG_CATEGORY_INIT (rtph263pay_debug, "rtph263pay", 0,
"H263 RTP Payloader");
}
static void
-gst_rtp_h263_pay_init (GstRtpH263Pay * rtph263pay, GstRtpH263PayClass * klass)
+gst_rtp_h263_pay_init (GstRtpH263Pay * rtph263pay)
{
rtph263pay->adapter = gst_adapter_new ();
}
static gboolean
-gst_rtp_h263_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_h263_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
payload->pt = GST_RTP_PAYLOAD_H263;
- gst_basertppayload_set_options (payload, "video", TRUE, "H263", 90000);
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ gst_rtp_base_payload_set_options (payload, "video", TRUE, "H263", 90000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
return res;
}
guint8 *header;
guint8 *payload;
GstFlowReturn ret;
+ GstRTPBuffer rtp = { NULL };
- header = gst_rtp_buffer_get_payload (package->outbuf);
+ gst_rtp_buffer_map (package->outbuf, GST_MAP_WRITE, &rtp);
+
+ header = gst_rtp_buffer_get_payload (&rtp);
payload = header + package->mode;
switch (package->mode) {
*/
GST_BUFFER_TIMESTAMP (package->outbuf) = rtph263pay->first_ts;
- gst_rtp_buffer_set_marker (package->outbuf, package->marker);
+ gst_rtp_buffer_set_marker (&rtp, package->marker);
if (package->marker)
GST_DEBUG ("Marker set!");
+ gst_rtp_buffer_unmap (&rtp);
+
ret =
- gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtph263pay),
+ gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263pay),
package->outbuf);
GST_DEBUG ("Package pushed, returning");
GST_DEBUG ("MTU: %d", context->mtu);
rtph263pay->available_data = gst_adapter_available (rtph263pay->adapter);
if (rtph263pay->available_data == 0) {
- ret = GST_FLOW_RESEND;
+ ret = GST_FLOW_OK;
goto end;
}
/* Get a pointer to all the data for the frame */
rtph263pay->data =
- (guint8 *) gst_adapter_peek (rtph263pay->adapter,
+ (guint8 *) gst_adapter_map (rtph263pay->adapter,
rtph263pay->available_data);
/* Picture header */
end:
gst_rtp_h263_pay_context_destroy (context,
context->piclayer->ptype_srcformat);
+ gst_adapter_unmap (rtph263pay->adapter);
gst_adapter_flush (rtph263pay->adapter, rtph263pay->available_data);
return ret;
}
static GstFlowReturn
-gst_rtp_h263_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buffer)
+gst_rtp_h263_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
GstRtpH263Pay *rtph263pay;
/* GStreamer
- * Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
#define __GST_RTP_H263_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRtpH263Pay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstAdapter *adapter;
GstClockTime first_ts;
struct _GstRtpH263PayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
typedef struct _GstRtpH263PayAHeader
)
);
-GST_BOILERPLATE (GstRtpH263PDepay, gst_rtp_h263p_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_h263p_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263PDepay, gst_rtp_h263p_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_h263p_depay_finalize (GObject * object);
static GstStateChangeReturn gst_rtp_h263p_depay_change_state (GstElement *
element, GstStateChange transition);
-static GstBuffer *gst_rtp_h263p_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_h263p_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-gboolean gst_rtp_h263p_depay_setcaps (GstBaseRTPDepayload * filter,
+gboolean gst_rtp_h263p_depay_setcaps (GstRTPBaseDepayload * filter,
GstCaps * caps);
static void
-gst_rtp_h263p_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h263p_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h263p_depay_sink_template);
-
-
- gst_element_class_set_details_simple (element_class, "RTP H263 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts H263/+/++ video from RTP packets (RFC 4629)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_h263p_depay_class_init (GstRtpH263PDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
- gstbasertpdepayload_class->process = gst_rtp_h263p_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_h263p_depay_setcaps;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_h263p_depay_finalize;
gstelement_class->change_state = gst_rtp_h263p_depay_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263p_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263p_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP H263 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H263/+/++ video from RTP packets (RFC 4629)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasedepayload_class->process = gst_rtp_h263p_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_h263p_depay_setcaps;
}
static void
-gst_rtp_h263p_depay_init (GstRtpH263PDepay * rtph263pdepay,
- GstRtpH263PDepayClass * klass)
+gst_rtp_h263p_depay_init (GstRtpH263PDepay * rtph263pdepay)
{
rtph263pdepay->adapter = gst_adapter_new ();
}
}
gboolean
-gst_rtp_h263p_depay_setcaps (GstBaseRTPDepayload * filter, GstCaps * caps)
+gst_rtp_h263p_depay_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps)
{
GstCaps *srccaps = NULL;
GstStructure *structure = gst_caps_get_structure (caps, 0);
if (!srccaps)
goto no_caps;
- res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (filter), srccaps);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (filter), srccaps);
gst_caps_unref (srccaps);
return res;
}
static GstBuffer *
-gst_rtp_h263p_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_h263p_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpH263PDepay *rtph263pdepay;
GstBuffer *outbuf;
+ gint payload_len;
+ guint8 *payload;
+ gboolean P, V, M;
+ guint header_len;
+ guint8 PLEN, PEBIT;
+ GstRTPBuffer rtp = { NULL };
rtph263pdepay = GST_RTP_H263P_DEPAY (depayload);
rtph263pdepay->wait_start = TRUE;
}
- {
- gint payload_len;
- guint8 *payload;
- gboolean P, V, M;
- guint header_len;
- guint8 PLEN, PEBIT;
-
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
-
- header_len = 2;
-
- if (payload_len < header_len)
- goto too_small;
-
- M = gst_rtp_buffer_get_marker (buf);
-
- /* 0 1
- * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- * | RR |P|V| PLEN |PEBIT|
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- P = (payload[0] & 0x04) == 0x04;
- V = (payload[0] & 0x02) == 0x02;
- PLEN = ((payload[0] & 0x1) << 5) | (payload[1] >> 3);
- PEBIT = payload[1] & 0x7;
-
- GST_LOG_OBJECT (depayload, "P %d, V %d, PLEN %d, PEBIT %d", P, V, PLEN,
- PEBIT);
-
- if (V) {
- header_len++;
- }
- if (PLEN) {
- header_len += PLEN;
- }
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- if ((!P && payload_len < header_len) || (P && payload_len < header_len - 2))
- goto too_small;
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ header_len = 2;
- if (P) {
- /* FIXME, have to make the packet writable hear. Better to reset these
- * bytes when we copy the packet below */
- rtph263pdepay->wait_start = FALSE;
- header_len -= 2;
- payload[header_len] = 0;
- payload[header_len + 1] = 0;
- }
+ if (payload_len < header_len)
+ goto too_small;
- if (rtph263pdepay->wait_start)
- goto waiting_start;
+ payload = gst_rtp_buffer_get_payload (&rtp);
- if (payload_len < header_len)
- goto too_small;
+ M = gst_rtp_buffer_get_marker (&rtp);
- /* FIXME do not ignore the VRC header (See RFC 2429 section 4.2) */
- /* FIXME actually use the RTP picture header when it is lost in the network */
- /* for now strip off header */
- payload += header_len;
- payload_len -= header_len;
+ /* 0 1
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | RR |P|V| PLEN |PEBIT|
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ P = (payload[0] & 0x04) == 0x04;
+ V = (payload[0] & 0x02) == 0x02;
+ PLEN = ((payload[0] & 0x1) << 5) | (payload[1] >> 3);
+ PEBIT = payload[1] & 0x7;
- if (M) {
- /* frame is completed: append to previous, push it out */
- guint len, padlen;
- guint avail;
+ GST_LOG_OBJECT (depayload, "P %d, V %d, PLEN %d, PEBIT %d", P, V, PLEN,
+ PEBIT);
- GST_LOG_OBJECT (depayload, "Frame complete");
+ if (V) {
+ header_len++;
+ }
+ if (PLEN) {
+ header_len += PLEN;
+ }
- avail = gst_adapter_available (rtph263pdepay->adapter);
+ if ((!P && payload_len < header_len) || (P && payload_len < header_len - 2))
+ goto too_small;
- len = avail + payload_len;
- padlen = (len % 4) + 4;
- outbuf = gst_buffer_new_and_alloc (len + padlen);
- memset (GST_BUFFER_DATA (outbuf) + len, 0, padlen);
- GST_BUFFER_SIZE (outbuf) = len;
+ if (P) {
+ /* FIXME, have to make the packet writable hear. Better to reset these
+ * bytes when we copy the packet below */
+ rtph263pdepay->wait_start = FALSE;
+ header_len -= 2;
+ payload[header_len] = 0;
+ payload[header_len + 1] = 0;
+ }
- /* prepend previous data */
- if (avail > 0) {
- gst_adapter_copy (rtph263pdepay->adapter, GST_BUFFER_DATA (outbuf), 0,
- avail);
- gst_adapter_flush (rtph263pdepay->adapter, avail);
- }
- memcpy (GST_BUFFER_DATA (outbuf) + avail, payload, payload_len);
+ if (rtph263pdepay->wait_start)
+ goto waiting_start;
- return outbuf;
+ if (payload_len < header_len)
+ goto too_small;
- } else {
- /* frame not completed: store in adapter */
- outbuf = gst_buffer_new_and_alloc (payload_len);
+ /* FIXME do not ignore the VRC header (See RFC 2429 section 4.2) */
+ /* FIXME actually use the RTP picture header when it is lost in the network */
+ /* for now strip off header */
+ payload += header_len;
+ payload_len -= header_len;
- GST_LOG_OBJECT (depayload, "Frame incomplete, storing %d", payload_len);
+ if (M) {
+ /* frame is completed: append to previous, push it out */
+ guint len, padlen;
+ guint avail;
+ GstMapInfo map;
- memcpy (GST_BUFFER_DATA (outbuf), payload, payload_len);
+ GST_LOG_OBJECT (depayload, "Frame complete");
- gst_adapter_push (rtph263pdepay->adapter, outbuf);
+ avail = gst_adapter_available (rtph263pdepay->adapter);
+ len = avail + payload_len;
+ padlen = (len % 4) + 4;
+
+ outbuf = gst_buffer_new_and_alloc (len + padlen);
+
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ memset (map.data + len, 0, padlen);
+
+ /* prepend previous data */
+ if (avail > 0) {
+ gst_adapter_copy (rtph263pdepay->adapter, map.data, 0, avail);
+ gst_adapter_flush (rtph263pdepay->adapter, avail);
}
+ memcpy (map.data + avail, payload, payload_len);
+ gst_buffer_unmap (outbuf, &map);
+ gst_rtp_buffer_unmap (&rtp);
+
+ return outbuf;
+
+ } else {
+ /* frame not completed: store in adapter */
+ outbuf = gst_buffer_new_and_alloc (payload_len);
+
+ GST_LOG_OBJECT (depayload, "Frame incomplete, storing %d", payload_len);
+ gst_buffer_fill (outbuf, 0, payload, payload_len);
+
+ gst_adapter_push (rtph263pdepay->adapter, outbuf);
+ gst_rtp_buffer_unmap (&rtp);
}
return NULL;
{
GST_ELEMENT_WARNING (rtph263pdepay, STREAM, DECODE,
("Packet payload was too small"), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
waiting_start:
{
GST_DEBUG_OBJECT (rtph263pdepay, "waiting for picture start");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpH263PDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstAdapter *adapter;
gboolean wait_start;
struct _GstRtpH263PDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_h263p_depay_get_type (void);
static void gst_rtp_h263p_pay_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static gboolean gst_rtp_h263p_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_h263p_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstCaps *gst_rtp_h263p_pay_sink_getcaps (GstBaseRTPPayload * payload,
- GstPad * pad);
-static GstFlowReturn gst_rtp_h263p_pay_handle_buffer (GstBaseRTPPayload *
+static GstCaps *gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn gst_rtp_h263p_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpH263PPay, gst_rtp_h263p_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_h263p_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h263p_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h263p_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP H263 payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes H263/+/++ video in RTP packets (RFC 4629)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_h263p_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263PPay, gst_rtp_h263p_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_h263p_pay_class_init (GstRtpH263PPayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->finalize = gst_rtp_h263p_pay_finalize;
gobject_class->set_property = gst_rtp_h263p_pay_set_property;
gobject_class->get_property = gst_rtp_h263p_pay_get_property;
- gstbasertppayload_class->set_caps = gst_rtp_h263p_pay_setcaps;
- gstbasertppayload_class->get_caps = gst_rtp_h263p_pay_sink_getcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_h263p_pay_handle_buffer;
+ gstrtpbasepayload_class->set_caps = gst_rtp_h263p_pay_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_h263p_pay_sink_getcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_h263p_pay_handle_buffer;
g_object_class_install_property (G_OBJECT_CLASS (klass),
PROP_FRAGMENTATION_MODE, g_param_spec_enum ("fragmentation-mode",
DEFAULT_FRAGMENTATION_MODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263p_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263p_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP H263 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes H263/+/++ video in RTP packets (RFC 4629)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtph263ppay_debug, "rtph263ppay",
0, "rtph263ppay (RFC 4629)");
}
static void
-gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay,
- GstRtpH263PPayClass * klass)
+gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay)
{
rtph263ppay->adapter = gst_adapter_new ();
}
static gboolean
-gst_rtp_h263p_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_h263p_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
GstCaps *peercaps;
g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);
- peercaps = gst_pad_peer_get_caps (GST_BASE_RTP_PAYLOAD_SRCPAD (payload));
+ peercaps =
+ gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);
if (peercaps) {
GstCaps *intersect = gst_caps_intersect (peercaps,
- gst_pad_get_pad_template_caps (GST_BASE_RTP_PAYLOAD_SRCPAD (payload)));
+ gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload)));
gst_caps_unref (peercaps);
if (!gst_caps_is_empty (intersect)) {
if (!encoding_name)
encoding_name = g_strdup ("H263-1998");
- gst_basertppayload_set_options (payload, "video", TRUE,
+ gst_rtp_base_payload_set_options (payload, "video", TRUE,
(gchar *) encoding_name, 90000);
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
g_free (encoding_name);
return res;
static GstCaps *
-gst_rtp_h263p_pay_sink_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
+gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload, GstPad * pad,
+ GstCaps * filter)
{
GstRtpH263PPay *rtph263ppay;
GstCaps *caps = NULL;
rtph263ppay = GST_RTP_H263P_PAY (payload);
- peercaps = gst_pad_peer_get_caps (GST_BASE_RTP_PAYLOAD_SRCPAD (payload));
+ peercaps =
+ gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), filter);
if (!peercaps)
return
gst_caps_copy (gst_pad_get_pad_template_caps
- (GST_BASE_RTP_PAYLOAD_SINKPAD (payload)));
+ (GST_RTP_BASE_PAYLOAD_SINKPAD (payload)));
intersect = gst_caps_intersect (peercaps,
- gst_pad_get_pad_template_caps (GST_BASE_RTP_PAYLOAD_SRCPAD (payload)));
+ gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload)));
gst_caps_unref (peercaps);
if (gst_caps_is_empty (intersect))
gint header_len;
guint next_gop = 0;
gboolean found_gob = FALSE;
+ GstRTPBuffer rtp = { NULL };
if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
/* start after 1st gop possible */
guint parsed_len = 3;
const guint8 *parse_data = NULL;
- parse_data = gst_adapter_peek (rtph263ppay->adapter, avail);
+ parse_data = gst_adapter_map (rtph263ppay->adapter, avail);
/* Check if we have a gob or eos , eossbs */
/* FIXME EOS and EOSSBS packets should never contain any gobs and vice-versa */
}
parsed_len++;
}
+ gst_adapter_unmap (rtph263ppay->adapter);
}
/* for picture start frames (non-fragmented), we need to remove the first
header_len = (fragmented && !found_gob) ? 2 : 0;
towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
- (GST_BASE_RTP_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));
+ (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));
if (next_gop > 0)
towrite = MIN (next_gop, towrite);
payload_len = header_len + towrite;
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* last fragment gets the marker bit set */
- gst_rtp_buffer_set_marker (outbuf, avail > towrite ? 0 : 1);
+ gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
gst_adapter_copy (rtph263ppay->adapter, &payload[header_len], 0, towrite);
GST_BUFFER_TIMESTAMP (outbuf) = rtph263ppay->first_timestamp;
GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
+ gst_rtp_buffer_unmap (&rtp);
gst_adapter_flush (rtph263ppay->adapter, towrite);
- ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtph263ppay), outbuf);
+ ret =
+ gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263ppay), outbuf);
avail -= towrite;
fragmented = TRUE;
}
static GstFlowReturn
-gst_rtp_h263p_pay_handle_buffer (GstBaseRTPPayload * payload,
+gst_rtp_h263p_pay_handle_buffer (GstRTPBasePayload * payload,
GstBuffer * buffer)
{
GstRtpH263PPay *rtph263ppay;
#define __GST_RTP_H263P_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRtpH263PPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstAdapter *adapter;
GstClockTime first_timestamp;
struct _GstRtpH263PPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_h263p_pay_get_type (void);
/* "max-rcmd-nalu-size = (string) ANY " */
);
-GST_BOILERPLATE (GstRtpH264Depay, gst_rtp_h264_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_h264_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH264Depay, gst_rtp_h264_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_h264_depay_finalize (GObject * object);
static void gst_rtp_h264_depay_set_property (GObject * object, guint prop_id,
static GstStateChangeReturn gst_rtp_h264_depay_change_state (GstElement *
element, GstStateChange transition);
-static GstBuffer *gst_rtp_h264_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_h264_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_h264_depay_setcaps (GstBaseRTPDepayload * filter,
+static gboolean gst_rtp_h264_depay_setcaps (GstRTPBaseDepayload * filter,
GstCaps * caps);
-static gboolean gst_rtp_h264_depay_handle_event (GstBaseRTPDepayload * depay,
+static gboolean gst_rtp_h264_depay_handle_event (GstRTPBaseDepayload * depay,
GstEvent * event);
static void
-gst_rtp_h264_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h264_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h264_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP H264 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts H264 video from RTP packets (RFC 3984)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_h264_depay_class_init (GstRtpH264DepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_h264_depay_finalize;
"Merge NALU into AU (picture) (deprecated; use caps)",
DEFAULT_ACCESS_UNIT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h264_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h264_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP H264 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H264 video from RTP packets (RFC 3984)",
+ "Wim Taymans <wim.taymans@gmail.com>");
gstelement_class->change_state = gst_rtp_h264_depay_change_state;
- gstbasertpdepayload_class->process = gst_rtp_h264_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_h264_depay_setcaps;
- gstbasertpdepayload_class->handle_event = gst_rtp_h264_depay_handle_event;
+ gstrtpbasedepayload_class->process = gst_rtp_h264_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_h264_depay_setcaps;
+ gstrtpbasedepayload_class->handle_event = gst_rtp_h264_depay_handle_event;
GST_DEBUG_CATEGORY_INIT (rtph264depay_debug, "rtph264depay", 0,
"H264 Video RTP Depayloader");
}
static void
-gst_rtp_h264_depay_init (GstRtpH264Depay * rtph264depay,
- GstRtpH264DepayClass * klass)
+gst_rtp_h264_depay_init (GstRtpH264Depay * rtph264depay)
{
rtph264depay->adapter = gst_adapter_new ();
rtph264depay->picture_adapter = gst_adapter_new ();
gint merge = -1;
caps =
- gst_pad_get_allowed_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (rtph264depay));
+ gst_pad_get_allowed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph264depay));
GST_DEBUG_OBJECT (rtph264depay, "allowed caps: %" GST_PTR_FORMAT, caps);
}
static gboolean
-gst_rtp_h264_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_h264_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstCaps *srccaps;
gint clock_rate;
GstRtpH264Depay *rtph264depay;
const gchar *ps, *profile;
GstBuffer *codec_data;
- guint8 *b64;
+ GstMapInfo map;
+ guint8 *ptr;
gboolean res;
rtph264depay = GST_RTP_H264_DEPAY (depayload);
clock_rate = 90000;
depayload->clock_rate = clock_rate;
- srccaps = gst_caps_new_simple ("video/x-h264", NULL);
+ srccaps = gst_caps_new_empty_simple ("video/x-h264");
/* Base64 encoded, comma separated config NALs */
ps = gst_structure_get_string (structure, "sprop-parameter-sets");
}
/* we seriously overshoot the length, but it's fine. */
codec_data = gst_buffer_new_and_alloc (len);
- b64 = GST_BUFFER_DATA (codec_data);
+
+ gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
+ ptr = map.data;
total = 0;
for (i = 0; params[i]; i++) {
guint save = 0;
gint state = 0;
GST_DEBUG_OBJECT (depayload, "decoding param %d (%s)", i, params[i]);
- memcpy (b64, sync_bytes, sizeof (sync_bytes));
- b64 += sizeof (sync_bytes);
+ memcpy (ptr, sync_bytes, sizeof (sync_bytes));
+ ptr += sizeof (sync_bytes);
len =
- g_base64_decode_step (params[i], strlen (params[i]), b64, &state,
+ g_base64_decode_step (params[i], strlen (params[i]), ptr, &state,
&save);
GST_DEBUG_OBJECT (depayload, "decoded %d bytes", len);
total += len + sizeof (sync_bytes);
- b64 += len;
+ ptr += len;
}
- GST_BUFFER_SIZE (codec_data) = total;
+ gst_buffer_unmap (codec_data, &map);
+ gst_buffer_resize (codec_data, 0, total);
g_strfreev (params);
/* keep the codec_data, we need to send it as the first buffer. We cannot
guint8 **sps, **pps;
guint len, num_sps, num_pps;
gint i;
- guint8 *data;
if (ps == NULL)
goto incomplete_caps;
}
codec_data = gst_buffer_new_and_alloc (len);
- data = GST_BUFFER_DATA (codec_data);
+
+ gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
+ ptr = map.data;
/* 8 bits version == 1 */
- *data++ = 1;
+ *ptr++ = 1;
if (profile) {
guint32 profile_id;
/* hex: AVCProfileIndication:8 | profile_compat:8 | AVCLevelIndication:8 */
sscanf (profile, "%6x", &profile_id);
- *data++ = (profile_id >> 16) & 0xff;
- *data++ = (profile_id >> 8) & 0xff;
- *data++ = profile_id & 0xff;
+ *ptr++ = (profile_id >> 16) & 0xff;
+ *ptr++ = (profile_id >> 8) & 0xff;
+ *ptr++ = profile_id & 0xff;
} else {
/* extract from SPS */
- *data++ = sps[0][3];
- *data++ = sps[0][4];
- *data++ = sps[0][5];
+ *ptr++ = sps[0][3];
+ *ptr++ = sps[0][4];
+ *ptr++ = sps[0][5];
}
/* 6 bits reserved | 2 bits lengthSizeMinusOn */
- *data++ = 0xff;
+ *ptr++ = 0xff;
/* 3 bits reserved | 5 bits numOfSequenceParameterSets */
- *data++ = 0xe0 | (num_sps & 0x1f);
+ *ptr++ = 0xe0 | (num_sps & 0x1f);
/* copy all SPS */
for (i = 0; sps[i]; i++) {
len = ((sps[i][0] << 8) | sps[i][1]) + 2;
GST_DEBUG_OBJECT (depayload, "copy SPS %d of length %d", i, len);
- memcpy (data, sps[i], len);
+ memcpy (ptr, sps[i], len);
g_free (sps[i]);
- data += len;
+ ptr += len;
}
g_free (sps);
/* 8 bits numOfPictureParameterSets */
- *data++ = num_pps;
+ *ptr++ = num_pps;
/* copy all PPS */
for (i = 0; pps[i]; i++) {
len = ((pps[i][0] << 8) | pps[i][1]) + 2;
GST_DEBUG_OBJECT (depayload, "copy PPS %d of length %d", i, len);
- memcpy (data, pps[i], len);
+ memcpy (ptr, pps[i], len);
g_free (pps[i]);
- data += len;
+ ptr += len;
}
g_free (pps);
- GST_BUFFER_SIZE (codec_data) = data - GST_BUFFER_DATA (codec_data);
+ gst_buffer_resize (codec_data, 0, ptr - map.data);
+ gst_buffer_unmap (codec_data, &map);
gst_caps_set_simple (srccaps,
"codec_data", GST_TYPE_BUFFER, codec_data, NULL);
gst_rtp_h264_depay_handle_nal (GstRtpH264Depay * rtph264depay, GstBuffer * nal,
GstClockTime in_timestamp, gboolean marker)
{
- GstBaseRTPDepayload *depayload = GST_BASE_RTP_DEPAYLOAD (rtph264depay);
+ GstRTPBaseDepayload *depayload = GST_RTP_BASE_DEPAYLOAD (rtph264depay);
gint nal_type;
- guint size;
- guint8 *data;
+ GstMapInfo map;
GstBuffer *outbuf = NULL;
GstClockTime out_timestamp;
gboolean keyframe, out_keyframe;
- size = GST_BUFFER_SIZE (nal);
- if (G_UNLIKELY (size < 5))
+ gst_buffer_map (nal, &map, GST_MAP_READ);
+ if (G_UNLIKELY (map.size < 5))
goto short_nal;
- data = GST_BUFFER_DATA (nal);
-
- nal_type = data[4] & 0x1f;
+ nal_type = map.data[4] & 0x1f;
GST_DEBUG_OBJECT (rtph264depay, "handle NAL type %d", nal_type);
keyframe = NAL_TYPE_IS_KEY (nal_type);
if (nal_type == 1 || nal_type == 2 || nal_type == 5) {
/* we have a picture start */
start = TRUE;
- if (data[5] & 0x80) {
+ if (map.data[5] & 0x80) {
/* first_mb_in_slice == 0 completes a picture */
complete = TRUE;
}
GST_DEBUG_OBJECT (depayload, "using NAL as output");
outbuf = nal;
}
+ gst_buffer_unmap (nal, &map);
if (outbuf) {
/* prepend codec_data */
rtph264depay->codec_data = NULL;
out_keyframe = TRUE;
}
- outbuf = gst_buffer_make_metadata_writable (outbuf);
+ outbuf = gst_buffer_make_writable (outbuf);
GST_BUFFER_TIMESTAMP (outbuf) = out_timestamp;
GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
else
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
-
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (depayload->srcpad));
}
return outbuf;
short_nal:
{
GST_WARNING_OBJECT (depayload, "dropping short NAL");
+ gst_buffer_unmap (nal, &map);
gst_buffer_unref (nal);
return NULL;
}
gboolean send)
{
guint outsize;
- guint8 *outdata;
+ GstMapInfo map;
GstBuffer *outbuf;
outsize = gst_adapter_available (rtph264depay->adapter);
outbuf = gst_adapter_take_buffer (rtph264depay->adapter, outsize);
- outdata = GST_BUFFER_DATA (outbuf);
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
GST_DEBUG_OBJECT (rtph264depay, "output %d bytes", outsize);
if (rtph264depay->byte_stream) {
- memcpy (outdata, sync_bytes, sizeof (sync_bytes));
+ memcpy (map.data, sync_bytes, sizeof (sync_bytes));
} else {
outsize -= 4;
- outdata[0] = (outsize >> 24);
- outdata[1] = (outsize >> 16);
- outdata[2] = (outsize >> 8);
- outdata[3] = (outsize);
+ map.data[0] = (outsize >> 24);
+ map.data[1] = (outsize >> 16);
+ map.data[2] = (outsize >> 8);
+ map.data[3] = (outsize);
}
+ gst_buffer_unmap (outbuf, &map);
rtph264depay->current_fu_type = 0;
outbuf = gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf,
rtph264depay->fu_timestamp, rtph264depay->fu_marker);
if (outbuf)
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtph264depay),
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtph264depay),
outbuf);
return NULL;
} else {
}
static GstBuffer *
-gst_rtp_h264_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_h264_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpH264Depay *rtph264depay;
GstBuffer *outbuf = NULL;
guint8 nal_unit_type;
+ GstRTPBuffer rtp = { NULL };
rtph264depay = GST_RTP_H264_DEPAY (depayload);
guint8 *payload;
guint header_len;
guint8 nal_ref_idc;
- guint8 *outdata;
+ GstMapInfo map;
guint outsize, nalu_size;
GstClockTime timestamp;
gboolean marker;
timestamp = GST_BUFFER_TIMESTAMP (buf);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG_OBJECT (rtph264depay, "receiving %d bytes", payload_len);
outsize = nalu_size + sizeof (sync_bytes);
outbuf = gst_buffer_new_and_alloc (outsize);
- outdata = GST_BUFFER_DATA (outbuf);
+
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
if (rtph264depay->byte_stream) {
- memcpy (outdata, sync_bytes, sizeof (sync_bytes));
+ memcpy (map.data, sync_bytes, sizeof (sync_bytes));
} else {
- outdata[0] = outdata[1] = 0;
- outdata[2] = payload[0];
- outdata[3] = payload[1];
+ map.data[0] = map.data[1] = 0;
+ map.data[2] = payload[0];
+ map.data[3] = payload[1];
}
/* strip NALU size */
payload += 2;
payload_len -= 2;
- outdata += sizeof (sync_bytes);
- memcpy (outdata, payload, nalu_size);
+ memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
+ gst_buffer_unmap (outbuf, &map);
gst_adapter_push (rtph264depay->adapter, outbuf);
nalu_size = payload_len;
outsize = nalu_size + sizeof (sync_bytes);
outbuf = gst_buffer_new_and_alloc (outsize);
- outdata = GST_BUFFER_DATA (outbuf);
- outdata += sizeof (sync_bytes);
- memcpy (outdata, payload, nalu_size);
- outdata[0] = nal_header;
+
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
+ map.data[sizeof (sync_bytes)] = nal_header;
+ gst_buffer_unmap (outbuf, &map);
GST_DEBUG_OBJECT (rtph264depay, "queueing %d bytes", outsize);
outsize = payload_len;
outbuf = gst_buffer_new_and_alloc (outsize);
- outdata = GST_BUFFER_DATA (outbuf);
- memcpy (outdata, payload, outsize);
+ gst_buffer_fill (outbuf, 0, payload, outsize);
GST_DEBUG_OBJECT (rtph264depay, "queueing %d bytes", outsize);
nalu_size = payload_len;
outsize = nalu_size + sizeof (sync_bytes);
outbuf = gst_buffer_new_and_alloc (outsize);
- outdata = GST_BUFFER_DATA (outbuf);
+
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
if (rtph264depay->byte_stream) {
- memcpy (outdata, sync_bytes, sizeof (sync_bytes));
+ memcpy (map.data, sync_bytes, sizeof (sync_bytes));
} else {
- outdata[0] = outdata[1] = 0;
- outdata[2] = nalu_size >> 8;
- outdata[3] = nalu_size & 0xff;
+ map.data[0] = map.data[1] = 0;
+ map.data[2] = nalu_size >> 8;
+ map.data[3] = nalu_size & 0xff;
}
- outdata += sizeof (sync_bytes);
- memcpy (outdata, payload, nalu_size);
+ memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
+ gst_buffer_unmap (outbuf, &map);
outbuf = gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf, timestamp,
marker);
break;
}
}
+ gst_rtp_buffer_unmap (&rtp);
}
return outbuf;
{
GST_ELEMENT_WARNING (rtph264depay, STREAM, DECODE,
(NULL), ("Undefined packet type"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
waiting_start:
{
GST_DEBUG_OBJECT (rtph264depay, "waiting for start");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
not_implemented:
{
GST_ELEMENT_ERROR (rtph264depay, STREAM, FORMAT,
(NULL), ("NAL unit type %d not supported yet", nal_unit_type));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static gboolean
-gst_rtp_h264_depay_handle_event (GstBaseRTPDepayload * depay, GstEvent * event)
+gst_rtp_h264_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event)
{
GstRtpH264Depay *rtph264depay;
}
return
- GST_BASE_RTP_DEPAYLOAD_CLASS (parent_class)->handle_event (depay, event);
+ GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (depay, event);
}
static GstStateChangeReturn
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpH264Depay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
gboolean byte_stream;
struct _GstRtpH264DepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_h264_depay_get_type (void);
static void gst_rtp_h264_pay_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstCaps *gst_rtp_h264_pay_getcaps (GstBaseRTPPayload * payload,
- GstPad * pad);
-static gboolean gst_rtp_h264_pay_setcaps (GstBaseRTPPayload * basepayload,
+static GstCaps *gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static gboolean gst_rtp_h264_pay_setcaps (GstRTPBasePayload * basepayload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_h264_pay_handle_buffer (GstBaseRTPPayload * pad,
+static GstFlowReturn gst_rtp_h264_pay_handle_buffer (GstRTPBasePayload * pad,
GstBuffer * buffer);
-static gboolean gst_rtp_h264_pay_handle_event (GstPad * pad, GstEvent * event);
-static GstStateChangeReturn gst_basertppayload_change_state (GstElement *
+static gboolean gst_rtp_h264_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+static GstStateChangeReturn gst_rtp_h264_pay_change_state (GstElement *
element, GstStateChange transition);
-GST_BOILERPLATE (GstRtpH264Pay, gst_rtp_h264_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_h264_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h264_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_h264_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP H264 payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encode H264 video into RTP packets (RFC 3984)",
- "Laurent Glayal <spglegle@yahoo.fr>");
-}
+#define gst_rtp_h264_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH264Pay, gst_rtp_h264_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_h264_pay_class_init (GstRtpH264PayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->set_property = gst_rtp_h264_pay_set_property;
gobject_class->get_property = gst_rtp_h264_pay_get_property;
gobject_class->finalize = gst_rtp_h264_pay_finalize;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h264_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h264_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP H264 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encode H264 video into RTP packets (RFC 3984)",
+ "Laurent Glayal <spglegle@yahoo.fr>");
+
gstelement_class->change_state =
- GST_DEBUG_FUNCPTR (gst_basertppayload_change_state);
+ GST_DEBUG_FUNCPTR (gst_rtp_h264_pay_change_state);
- gstbasertppayload_class->get_caps = gst_rtp_h264_pay_getcaps;
- gstbasertppayload_class->set_caps = gst_rtp_h264_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_h264_pay_handle_buffer;
- gstbasertppayload_class->handle_event = gst_rtp_h264_pay_handle_event;
+ gstrtpbasepayload_class->get_caps = gst_rtp_h264_pay_getcaps;
+ gstrtpbasepayload_class->set_caps = gst_rtp_h264_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_h264_pay_handle_buffer;
+ gstrtpbasepayload_class->sink_event = gst_rtp_h264_pay_sink_event;
GST_DEBUG_CATEGORY_INIT (rtph264pay_debug, "rtph264pay", 0,
"H264 RTP Payloader");
}
static void
-gst_rtp_h264_pay_init (GstRtpH264Pay * rtph264pay, GstRtpH264PayClass * klass)
+gst_rtp_h264_pay_init (GstRtpH264Pay * rtph264pay)
{
rtph264pay->queue = g_array_new (FALSE, FALSE, sizeof (guint));
rtph264pay->profile = 0;
};
static GstCaps *
-gst_rtp_h264_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
+gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *template_caps;
GstCaps *allowed_caps;
guint i;
allowed_caps =
- gst_pad_peer_get_caps_reffed (GST_BASE_RTP_PAYLOAD_SRCPAD (payload));
+ gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), filter);
if (allowed_caps == NULL)
return NULL;
for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
GstStructure *s = gst_caps_get_structure (allowed_caps, i);
- GstStructure *new_s = gst_structure_new ("video/x-h264", NULL);
+ GstStructure *new_s = gst_structure_new_empty ("video/x-h264");
const gchar *profile_level_id;
profile_level_id = gst_structure_get_string (s, "profile-level-id");
/* take the currently configured SPS and PPS lists and set them on the caps as
* sprop-parameter-sets */
static gboolean
-gst_rtp_h264_pay_set_sps_pps (GstBaseRTPPayload * basepayload)
+gst_rtp_h264_pay_set_sps_pps (GstRTPBasePayload * basepayload)
{
GstRtpH264Pay *payloader = GST_RTP_H264_PAY (basepayload);
gchar *profile;
GString *sprops;
guint count;
gboolean res;
+ GstMapInfo map;
sprops = g_string_new ("");
count = 0;
for (walk = payloader->sps; walk; walk = g_list_next (walk)) {
GstBuffer *sps_buf = GST_BUFFER_CAST (walk->data);
- set =
- g_base64_encode (GST_BUFFER_DATA (sps_buf), GST_BUFFER_SIZE (sps_buf));
+ gst_buffer_map (sps_buf, &map, GST_MAP_READ);
+ set = g_base64_encode (map.data, map.size);
+ gst_buffer_unmap (sps_buf, &map);
+
g_string_append_printf (sprops, "%s%s", count ? "," : "", set);
g_free (set);
count++;
for (walk = payloader->pps; walk; walk = g_list_next (walk)) {
GstBuffer *pps_buf = GST_BUFFER_CAST (walk->data);
- set =
- g_base64_encode (GST_BUFFER_DATA (pps_buf), GST_BUFFER_SIZE (pps_buf));
+ gst_buffer_map (pps_buf, &map, GST_MAP_READ);
+ set = g_base64_encode (map.data, map.size);
+ gst_buffer_unmap (pps_buf, &map);
+
g_string_append_printf (sprops, "%s%s", count ? "," : "", set);
g_free (set);
count++;
/* profile is 24 bit. Force it to respect the limit */
profile = g_strdup_printf ("%06x", payloader->profile & 0xffffff);
/* combine into output caps */
- res = gst_basertppayload_set_outcaps (basepayload,
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
"sprop-parameter-sets", G_TYPE_STRING, sprops->str, NULL);
g_string_free (sprops, TRUE);
g_free (profile);
}
static gboolean
-gst_rtp_h264_pay_setcaps (GstBaseRTPPayload * basepayload, GstCaps * caps)
+gst_rtp_h264_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
{
GstRtpH264Pay *rtph264pay;
GstStructure *str;
const GValue *value;
+ GstMapInfo map;
guint8 *data;
- guint size;
+ gsize size;
+ GstBuffer *buffer;
const gchar *alignment;
rtph264pay = GST_RTP_H264_PAY (basepayload);
/* we can only set the output caps when we found the sprops and profile
* NALs */
- gst_basertppayload_set_options (basepayload, "video", TRUE, "H264", 90000);
+ gst_rtp_base_payload_set_options (basepayload, "video", TRUE, "H264", 90000);
alignment = gst_structure_get_string (str, "alignment");
if (alignment && !strcmp (alignment, "au"))
/* packetized AVC video has a codec_data */
if ((value = gst_structure_get_value (str, "codec_data"))) {
- GstBuffer *buffer;
guint num_sps, num_pps;
gint i, nal_size;
rtph264pay->packetized = TRUE;
buffer = gst_value_get_buffer (value);
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
/* parse the avcC data */
if (size < 7)
/* make a buffer out of it and add to SPS list */
sps_buf = gst_buffer_new_and_alloc (nal_size);
- memcpy (GST_BUFFER_DATA (sps_buf), data, nal_size);
+ gst_buffer_fill (sps_buf, 0, data, nal_size);
rtph264pay->sps = g_list_append (rtph264pay->sps, sps_buf);
data += nal_size;
/* make a buffer out of it and add to PPS list */
pps_buf = gst_buffer_new_and_alloc (nal_size);
- memcpy (GST_BUFFER_DATA (pps_buf), data, nal_size);
+ gst_buffer_fill (pps_buf, 0, data, nal_size);
rtph264pay->pps = g_list_append (rtph264pay->pps, pps_buf);
data += nal_size;
size -= nal_size;
}
+ gst_buffer_unmap (buffer, &map);
/* and update the caps with the collected data */
if (!gst_rtp_h264_pay_set_sps_pps (basepayload))
- return FALSE;
+ goto set_sps_pps_failed;
} else {
GST_DEBUG_OBJECT (rtph264pay, "have bytestream h264");
rtph264pay->packetized = FALSE;
avcc_too_small:
{
- GST_ERROR_OBJECT (rtph264pay, "avcC size %u < 7", size);
- return FALSE;
+ GST_ERROR_OBJECT (rtph264pay, "avcC size %" G_GSIZE_FORMAT " < 7", size);
+ goto error;
}
wrong_version:
{
GST_ERROR_OBJECT (rtph264pay, "wrong avcC version");
- return FALSE;
+ goto error;
}
avcc_error:
{
GST_ERROR_OBJECT (rtph264pay, "avcC too small ");
+ goto error;
+ }
+set_sps_pps_failed:
+ {
+ GST_ERROR_OBJECT (rtph264pay, "failed to set sps/pps");
+ goto error;
+ }
+error:
+ {
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
}
for (i = 0; params[i]; i++) {
gsize nal_len;
+ GstMapInfo map;
guint8 *nalp;
guint save = 0;
gint state = 0;
+ guint8 nal_type;
nal_len = strlen (params[i]);
buf = gst_buffer_new_and_alloc (nal_len);
- nalp = GST_BUFFER_DATA (buf);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ nalp = map.data;
nal_len = g_base64_decode_step (params[i], nal_len, nalp, &state, &save);
- GST_BUFFER_SIZE (buf) = nal_len;
+ nal_type = nalp[0];
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_resize (buf, 0, nal_len);
if (!nal_len) {
gst_buffer_unref (buf);
}
/* append to the right list */
- if ((nalp[0] & 0x1f) == 7) {
+ if ((nal_type & 0x1f) == 7) {
GST_DEBUG_OBJECT (rtph264pay, "adding param %d as SPS %d", i, num_sps);
rtph264pay->sps = g_list_append (rtph264pay->sps, buf);
num_sps++;
if (payloader->sps != NULL) {
sps_buf = GST_BUFFER_CAST (payloader->sps->data);
- if ((GST_BUFFER_SIZE (sps_buf) != sps_len)
- || memcmp (GST_BUFFER_DATA (sps_buf), sps, sps_len)) {
+ if (gst_buffer_memcmp (sps_buf, 0, sps, sps_len)) {
/* something changed, update */
payloader->profile = (sps[1] << 16) + (sps[2] << 8) + sps[3];
GST_DEBUG ("Profile level IDC = %06x", payloader->profile);
if (updated) {
sps_buf = gst_buffer_new_and_alloc (sps_len);
- memcpy (GST_BUFFER_DATA (sps_buf), sps, sps_len);
+ gst_buffer_fill (sps_buf, 0, sps, sps_len);
if (payloader->sps) {
/* replace old buffer */
if (payloader->pps != NULL) {
pps_buf = GST_BUFFER_CAST (payloader->pps->data);
- if ((GST_BUFFER_SIZE (pps_buf) != pps_len)
- || memcmp (GST_BUFFER_DATA (pps_buf), pps, pps_len)) {
+ if (gst_buffer_memcmp (pps_buf, 0, pps, pps_len)) {
/* something changed, update */
updated = TRUE;
}
if (updated) {
pps_buf = gst_buffer_new_and_alloc (pps_len);
- memcpy (GST_BUFFER_DATA (pps_buf), pps, pps_len);
+ gst_buffer_fill (pps_buf, 0, pps, pps_len);
if (payloader->pps) {
/* replace old buffer */
}
static GstFlowReturn
-gst_rtp_h264_pay_payload_nal (GstBaseRTPPayload * basepayload,
+gst_rtp_h264_pay_payload_nal (GstRTPBasePayload * basepayload,
const guint8 * data, guint size, GstClockTime timestamp,
GstBuffer * buffer_orig, gboolean end_of_au);
static GstFlowReturn
-gst_rtp_h264_pay_send_sps_pps (GstBaseRTPPayload * basepayload,
+gst_rtp_h264_pay_send_sps_pps (GstRTPBasePayload * basepayload,
GstRtpH264Pay * rtph264pay, GstClockTime timestamp)
{
GstFlowReturn ret = GST_FLOW_OK;
GList *walk;
+ GstMapInfo map;
for (walk = rtph264pay->sps; walk; walk = g_list_next (walk)) {
GstBuffer *sps_buf = GST_BUFFER_CAST (walk->data);
GST_DEBUG_OBJECT (rtph264pay, "inserting SPS in the stream");
/* resend SPS */
+ gst_buffer_map (sps_buf, &map, GST_MAP_READ);
ret = gst_rtp_h264_pay_payload_nal (basepayload,
- GST_BUFFER_DATA (sps_buf), GST_BUFFER_SIZE (sps_buf), timestamp,
- sps_buf, FALSE);
+ map.data, map.size, timestamp, sps_buf, FALSE);
+ gst_buffer_unmap (sps_buf, &map);
/* Not critical here; but throw a warning */
if (ret != GST_FLOW_OK)
GST_WARNING ("Problem pushing SPS");
GST_DEBUG_OBJECT (rtph264pay, "inserting PPS in the stream");
/* resend PPS */
+ gst_buffer_map (pps_buf, &map, GST_MAP_READ);
ret = gst_rtp_h264_pay_payload_nal (basepayload,
- GST_BUFFER_DATA (pps_buf), GST_BUFFER_SIZE (pps_buf), timestamp,
- pps_buf, FALSE);
+ map.data, map.size, timestamp, pps_buf, FALSE);
+ gst_buffer_unmap (pps_buf, &map);
/* Not critical here; but throw a warning */
if (ret != GST_FLOW_OK)
GST_WARNING ("Problem pushing PPS");
}
static GstFlowReturn
-gst_rtp_h264_pay_payload_nal (GstBaseRTPPayload * basepayload,
+gst_rtp_h264_pay_payload_nal (GstRTPBasePayload * basepayload,
const guint8 * data, guint size, GstClockTime timestamp,
GstBuffer * buffer_orig, gboolean end_of_au)
{
guint packet_len, payload_len, mtu;
GstBuffer *outbuf;
guint8 *payload;
+#if 0
GstBufferList *list = NULL;
- GstBufferListIterator *it = NULL;
+#endif
gboolean send_spspps;
+ GstRTPBuffer rtp = { NULL };
rtph264pay = GST_RTP_H264_PAY (basepayload);
- mtu = GST_BASE_RTP_PAYLOAD_MTU (rtph264pay);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (rtph264pay);
nalType = data[0] & 0x1f;
GST_DEBUG_OBJECT (rtph264pay, "Processing Buffer with NAL TYPE=%d", nalType);
"NAL Unit fit in one packet datasize=%d mtu=%d", size, mtu);
/* will fit in one packet */
+#if 0
if (rtph264pay->buffer_list) {
/* use buffer lists
* first create buffer without payload containing only the RTP header
* and then another buffer containing the payload. both buffers will
* be then added to the list */
outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
- } else {
+ } else
+#endif
+ {
/* use the old-fashioned way with a single buffer and memcpy */
outbuf = gst_rtp_buffer_new_allocate (size, 0, 0);
}
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* only set the marker bit on packets containing access units */
if (IS_ACCESS_UNIT (nalType) && end_of_au) {
- gst_rtp_buffer_set_marker (outbuf, 1);
+ gst_rtp_buffer_set_marker (&rtp, 1);
}
/* timestamp the outbuffer */
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+#if 0
if (rtph264pay->buffer_list) {
GstBuffer *paybuf;
GST_BUFFER_DATA (buffer_orig), size);
else {
paybuf = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (paybuf), data, size);
+ gst_buffer_fill (paybuf, 0, data, size);
}
list = gst_buffer_list_new ();
- it = gst_buffer_list_iterate (list);
/* add both buffers to the buffer list */
- gst_buffer_list_iterator_add_group (it);
- gst_buffer_list_iterator_add (it, outbuf);
- gst_buffer_list_iterator_add (it, paybuf);
-
- gst_buffer_list_iterator_free (it);
+ gst_buffer_list_add (list, outbuf);
+ gst_buffer_list_add (list, paybuf);
/* push the list to the next element in the pipe */
- ret = gst_basertppayload_push_list (basepayload, list);
- } else {
- payload = gst_rtp_buffer_get_payload (outbuf);
+ ret = gst_rtp_base_payload_push_list (basepayload, list);
+ } else
+#endif
+ {
+ payload = gst_rtp_buffer_get_payload (&rtp);
GST_DEBUG_OBJECT (basepayload, "Copying %d bytes to outbuf", size);
memcpy (payload, data, size);
+ gst_rtp_buffer_unmap (&rtp);
- ret = gst_basertppayload_push (basepayload, outbuf);
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
}
} else {
/* fragmentation Units FU-A */
/* We keep 2 bytes for FU indicator and FU Header */
payload_len = gst_rtp_buffer_calc_payload_len (mtu - 2, 0, 0);
+#if 0
if (rtph264pay->buffer_list) {
list = gst_buffer_list_new ();
it = gst_buffer_list_iterate (list);
}
+#endif
while (end == 0) {
limitedSize = size < payload_len ? size : payload_len;
"Inside FU-A fragmentation limitedSize=%d iteration=%d", limitedSize,
ii);
+#if 0
if (rtph264pay->buffer_list) {
/* use buffer lists
* first create buffer without payload containing only the RTP header
* and then another buffer containing the payload. both buffers will
* be then added to the list */
outbuf = gst_rtp_buffer_new_allocate (2, 0, 0);
- } else {
+ } else
+#endif
+ {
/* use the old-fashioned way with a single buffer and memcpy
* first create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (limitedSize + 2, 0, 0);
}
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
if (limitedSize == size) {
GST_DEBUG_OBJECT (basepayload, "end size=%d iteration=%d", size, ii);
end = 1;
}
if (IS_ACCESS_UNIT (nalType)) {
- gst_rtp_buffer_set_marker (outbuf, end && end_of_au);
+ gst_rtp_buffer_set_marker (&rtp, end && end_of_au);
}
/* FU indicator */
/* FU Header */
payload[1] = (start << 7) | (end << 6) | (nalHeader & 0x1f);
+#if 0
if (rtph264pay->buffer_list) {
GstBuffer *paybuf;
gst_buffer_list_iterator_add (it, outbuf);
gst_buffer_list_iterator_add (it, paybuf);
- } else {
+ } else
+#endif
+ {
memcpy (&payload[2], data + pos, limitedSize);
+ gst_rtp_buffer_unmap (&rtp);
GST_DEBUG_OBJECT (basepayload,
"recorded %d payload bytes into packet iteration=%d",
limitedSize + 2, ii);
- ret = gst_basertppayload_push (basepayload, outbuf);
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
if (ret != GST_FLOW_OK)
break;
}
start = 0;
}
+#if 0
if (rtph264pay->buffer_list) {
/* free iterator and push the whole buffer list at once */
gst_buffer_list_iterator_free (it);
- ret = gst_basertppayload_push_list (basepayload, list);
+ ret = gst_rtp_base_payload_push_list (basepayload, list);
}
+#endif
}
return ret;
}
static GstFlowReturn
-gst_rtp_h264_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_h264_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpH264Pay *rtph264pay;
GstFlowReturn ret;
- guint size, nal_len, i;
+ gsize size;
+ guint nal_len, i;
+ GstMapInfo map;
const guint8 *data, *nal_data;
GstClockTime timestamp;
GArray *nal_queue;
guint pushed = 0;
+ gboolean bytestream;
rtph264pay = GST_RTP_H264_PAY (basepayload);
/* the input buffer contains one or more NAL units */
- if (rtph264pay->scan_mode == GST_H264_SCAN_MODE_BYTESTREAM) {
+ bytestream = (rtph264pay->scan_mode == GST_H264_SCAN_MODE_BYTESTREAM);
+
+ if (bytestream) {
timestamp = gst_adapter_prev_timestamp (rtph264pay->adapter, NULL);
gst_adapter_push (rtph264pay->adapter, buffer);
size = gst_adapter_available (rtph264pay->adapter);
- data = gst_adapter_peek (rtph264pay->adapter, size);
- GST_DEBUG_OBJECT (basepayload, "got %d bytes (%d)", size,
- GST_BUFFER_SIZE (buffer));
+ data = gst_adapter_map (rtph264pay->adapter, size);
+ GST_DEBUG_OBJECT (basepayload,
+ "got %" G_GSIZE_FORMAT " bytes (%" G_GSIZE_FORMAT ")", size,
+ gst_buffer_get_size (buffer));
if (!GST_CLOCK_TIME_IS_VALID (timestamp))
timestamp = GST_BUFFER_TIMESTAMP (buffer);
} else {
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
timestamp = GST_BUFFER_TIMESTAMP (buffer);
- GST_DEBUG_OBJECT (basepayload, "got %d bytes", size);
+ GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes", size);
}
ret = GST_FLOW_OK;
/* array must be empty when we get here */
g_assert (nal_queue->len == 0);
- GST_DEBUG_OBJECT (basepayload, "found first start at %u, bytes left %u",
- next, size);
+ GST_DEBUG_OBJECT (basepayload,
+ "found first start at %u, bytes left %" G_GSIZE_FORMAT, next, size);
/* first pass to locate NALs and parse SPS/PPS */
while (size > 4) {
if (rtph264pay->sprop_parameter_sets != NULL) {
/* explicitly set profile and sprop, use those */
if (rtph264pay->update_caps) {
- if (!gst_basertppayload_set_outcaps (basepayload,
+ if (!gst_rtp_base_payload_set_outcaps (basepayload,
"sprop-parameter-sets", G_TYPE_STRING,
rtph264pay->sprop_parameter_sets, NULL))
goto caps_rejected;
g_array_set_size (nal_queue, 0);
}
- if (rtph264pay->scan_mode == GST_H264_SCAN_MODE_BYTESTREAM)
+done:
+ if (bytestream) {
+ gst_adapter_unmap (rtph264pay->adapter);
gst_adapter_flush (rtph264pay->adapter, pushed);
- else
+ } else {
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
+ }
return ret;
{
GST_WARNING_OBJECT (basepayload, "Could not set outcaps");
g_array_set_size (nal_queue, 0);
- gst_buffer_unref (buffer);
- return GST_FLOW_NOT_NEGOTIATED;
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto done;
}
}
static gboolean
-gst_rtp_h264_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_h264_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
{
+ gboolean res;
const GstStructure *s;
- GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (GST_PAD_PARENT (pad));
+ GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
break;
}
- return FALSE;
+ res = GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
+
+ return res;
}
static GstStateChangeReturn
-gst_basertppayload_change_state (GstElement * element,
- GstStateChange transition)
+gst_rtp_h264_pay_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret;
GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (element);
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
G_BEGIN_DECLS
struct _GstRtpH264Pay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
guint profile;
GList *sps, *pps;
struct _GstRtpH264PayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_h264_pay_get_type (void);
static void gst_ilbc_depay_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-static GstBuffer *gst_rtp_ilbc_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_ilbc_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_ilbc_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_ilbc_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPiLBCDepay, gst_rtp_ilbc_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_ilbc_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPiLBCDepay, gst_rtp_ilbc_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
#define GST_TYPE_ILBC_MODE (gst_ilbc_mode_get_type())
static GType
}
static void
-gst_rtp_ilbc_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_ilbc_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_ilbc_depay_sink_template);
- gst_element_class_set_details_simple (element_class, "RTP iLBC depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts iLBC audio from RTP packets (RFC 3952)",
- "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
-}
-
-static void
gst_rtp_ilbc_depay_class_init (GstRTPiLBCDepayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->set_property = gst_ilbc_depay_set_property;
gobject_class->get_property = gst_ilbc_depay_get_property;
GST_TYPE_ILBC_MODE, DEFAULT_MODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gstbasertpdepayload_class->process = gst_rtp_ilbc_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_ilbc_depay_setcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ilbc_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ilbc_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP iLBC depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts iLBC audio from RTP packets (RFC 3952)",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
+
+ gstrtpbasedepayload_class->process = gst_rtp_ilbc_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_ilbc_depay_setcaps;
}
static void
-gst_rtp_ilbc_depay_init (GstRTPiLBCDepay * rtpilbcdepay,
- GstRTPiLBCDepayClass * klass)
+gst_rtp_ilbc_depay_init (GstRTPiLBCDepay * rtpilbcdepay)
{
/* Set default mode */
rtpilbcdepay->mode = DEFAULT_MODE;
}
static gboolean
-gst_rtp_ilbc_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_ilbc_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstRTPiLBCDepay *rtpilbcdepay = GST_RTP_ILBC_DEPAY (depayload);
GstCaps *srccaps;
srccaps = gst_caps_new_simple ("audio/x-iLBC",
"mode", G_TYPE_INT, rtpilbcdepay->mode, NULL);
- ret = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
GST_DEBUG ("set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret);
gst_caps_unref (srccaps);
}
static GstBuffer *
-gst_rtp_ilbc_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_ilbc_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ marker = gst_rtp_buffer_get_marker (&rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark start of talkspurt with DISCONT */
#define __GST_RTP_ILBC_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRTPiLBCDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstiLBCMode mode;
};
struct _GstRTPiLBCDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_ilbc_depay_get_type (void);
);
-static GstCaps *gst_rtp_ilbc_pay_sink_getcaps (GstBaseRTPPayload * payload,
- GstPad * pad);
-static gboolean gst_rtp_ilbc_pay_sink_setcaps (GstBaseRTPPayload * payload,
+static GstCaps *gst_rtp_ilbc_pay_sink_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static gboolean gst_rtp_ilbc_pay_sink_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPILBCPay, gst_rtp_ilbc_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
+#define gst_rtp_ilbc_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPILBCPay, gst_rtp_ilbc_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
static void
-gst_rtp_ilbc_pay_base_init (gpointer klass)
+gst_rtp_ilbc_pay_class_init (GstRTPILBCPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_ilbc_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_ilbc_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP iLBC Payloader",
- "Codec/Payloader/Network/RTP",
- "Packetize iLBC audio streams into RTP packets",
- "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
-}
+ GST_DEBUG_CATEGORY_INIT (rtpilbcpay_debug, "rtpilbcpay", 0,
+ "iLBC audio RTP payloader");
-static void
-gst_rtp_ilbc_pay_class_init (GstRTPILBCPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ilbc_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ilbc_pay_src_template));
- gstbasertppayload_class->set_caps = gst_rtp_ilbc_pay_sink_setcaps;
- gstbasertppayload_class->get_caps = gst_rtp_ilbc_pay_sink_getcaps;
+ gst_element_class_set_details_simple (gstelement_class, "RTP iLBC Payloader",
+ "Codec/Payloader/Network/RTP",
+ "Packetize iLBC audio streams into RTP packets",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
- GST_DEBUG_CATEGORY_INIT (rtpilbcpay_debug, "rtpilbcpay", 0,
- "iLBC audio RTP payloader");
+ gstrtpbasepayload_class->set_caps = gst_rtp_ilbc_pay_sink_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_ilbc_pay_sink_getcaps;
}
static void
-gst_rtp_ilbc_pay_init (GstRTPILBCPay * rtpilbcpay, GstRTPILBCPayClass * klass)
+gst_rtp_ilbc_pay_init (GstRTPILBCPay * rtpilbcpay)
{
- GstBaseRTPPayload *basertppayload;
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBasePayload *rtpbasepayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
- basertppayload = GST_BASE_RTP_PAYLOAD (rtpilbcpay);
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (rtpilbcpay);
+ rtpbasepayload = GST_RTP_BASE_PAYLOAD (rtpilbcpay);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpilbcpay);
/* we don't set the payload type, it should be set by the application using
* the pt property or the default 96 will be used */
- basertppayload->clock_rate = 8000;
+ rtpbasepayload->clock_rate = 8000;
rtpilbcpay->mode = -1;
- /* tell basertpaudiopayload that this is a frame based codec */
- gst_base_rtp_audio_payload_set_frame_based (basertpaudiopayload);
+ /* tell rtpbaseaudiopayload that this is a frame based codec */
+ gst_rtp_base_audio_payload_set_frame_based (rtpbaseaudiopayload);
}
static gboolean
-gst_rtp_ilbc_pay_sink_setcaps (GstBaseRTPPayload * basertppayload,
+gst_rtp_ilbc_pay_sink_setcaps (GstRTPBasePayload * rtpbasepayload,
GstCaps * caps)
{
GstRTPILBCPay *rtpilbcpay;
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
gboolean ret;
gint mode;
gchar *mode_str;
GstStructure *structure;
const char *payload_name;
- rtpilbcpay = GST_RTP_ILBC_PAY (basertppayload);
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (basertppayload);
+ rtpilbcpay = GST_RTP_ILBC_PAY (rtpbasepayload);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpbasepayload);
structure = gst_caps_get_structure (caps, 0);
if (mode != 20 && mode != 30)
goto wrong_mode;
- gst_basertppayload_set_options (basertppayload, "audio", TRUE, "ILBC", 8000);
+ gst_rtp_base_payload_set_options (rtpbasepayload, "audio", TRUE, "ILBC",
+ 8000);
/* set options for this frame based audio codec */
- gst_base_rtp_audio_payload_set_frame_options (basertpaudiopayload,
+ gst_rtp_base_audio_payload_set_frame_options (rtpbaseaudiopayload,
mode, mode == 30 ? 50 : 38);
mode_str = g_strdup_printf ("%d", mode);
ret =
- gst_basertppayload_set_outcaps (basertppayload, "mode", G_TYPE_STRING,
+ gst_rtp_base_payload_set_outcaps (rtpbasepayload, "mode", G_TYPE_STRING,
mode_str, NULL);
g_free (mode_str);
/* we return the padtemplate caps with the mode field fixated to a value if we
* can */
static GstCaps *
-gst_rtp_ilbc_pay_sink_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
+gst_rtp_ilbc_pay_sink_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
#define __GST_RTP_ILBC_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpaudiopayload.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
G_BEGIN_DECLS
struct _GstRTPILBCPay
{
- GstBaseRTPAudioPayload audiopayload;
+ GstRTPBaseAudioPayload audiopayload;
gint mode;
};
struct _GstRTPILBCPayClass
{
- GstBaseRTPAudioPayloadClass parent_class;
+ GstRTPBaseAudioPayloadClass parent_class;
};
GType gst_rtp_ilbc_pay_get_type (void);
PROP_LAST
};
-GST_BOILERPLATE (GstRtpJ2KDepay, gst_rtp_j2k_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_j2k_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJ2KDepay, gst_rtp_j2k_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_j2k_depay_finalize (GObject * object);
gst_rtp_j2k_depay_change_state (GstElement * element,
GstStateChange transition);
-static gboolean gst_rtp_j2k_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_j2k_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_j2k_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_j2k_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_j2k_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_j2k_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_j2k_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP JPEG 2000 depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts JPEG 2000 video from RTP packets (RFC 5371)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_j2k_depay_class_init (GstRtpJ2KDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_j2k_depay_finalize;
"Use Buffer Lists",
DEFAULT_BUFFER_LIST, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_j2k_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_j2k_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP JPEG 2000 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts JPEG 2000 video from RTP packets (RFC 5371)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstelement_class->change_state = gst_rtp_j2k_depay_change_state;
- gstbasertpdepayload_class->set_caps = gst_rtp_j2k_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_j2k_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_j2k_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_j2k_depay_process;
GST_DEBUG_CATEGORY_INIT (rtpj2kdepay_debug, "rtpj2kdepay", 0,
"J2K Video RTP Depayloader");
}
static void
-gst_rtp_j2k_depay_init (GstRtpJ2KDepay * rtpj2kdepay,
- GstRtpJ2KDepayClass * klass)
+gst_rtp_j2k_depay_init (GstRtpJ2KDepay * rtpj2kdepay)
{
rtpj2kdepay->buffer_list = DEFAULT_BUFFER_LIST;
}
static gboolean
-gst_rtp_j2k_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_j2k_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
gint clock_rate;
outcaps =
gst_caps_new_simple ("image/x-jpc", "framerate", GST_TYPE_FRACTION, 0, 1,
- "fields", G_TYPE_INT, 1, "fourcc", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('s',
- 'Y', 'U', 'V'), NULL);
+ "fields", G_TYPE_INT, 1, "colorspace", G_TYPE_STRING, "sYUV", NULL);
res = gst_pad_set_caps (depayload->srcpad, outcaps);
gst_caps_unref (outcaps);
}
static GstFlowReturn
-gst_rtp_j2k_depay_flush_pu (GstBaseRTPDepayload * depayload)
+gst_rtp_j2k_depay_flush_pu (GstRTPBaseDepayload * depayload)
{
GstRtpJ2KDepay *rtpj2kdepay;
GstBuffer *mheader;
/* append packets */
for (walk = packets; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
- GST_DEBUG_OBJECT (rtpj2kdepay, "append pu packet of size %u",
- GST_BUFFER_SIZE (buf));
+ GST_DEBUG_OBJECT (rtpj2kdepay,
+ "append pu packet of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buf));
gst_adapter_push (rtpj2kdepay->t_adapter, buf);
}
g_list_free (packets);
}
static GstFlowReturn
-gst_rtp_j2k_depay_flush_tile (GstBaseRTPDepayload * depayload)
+gst_rtp_j2k_depay_flush_tile (GstRTPBaseDepayload * depayload)
{
GstRtpJ2KDepay *rtpj2kdepay;
guint avail, mh_id;
GList *packets, *walk;
guint8 end[2];
GstFlowReturn ret = GST_FLOW_OK;
+ GstMapInfo map;
+ GstBuffer *buf;
rtpj2kdepay = GST_RTP_J2K_DEPAY (depayload);
/* now append the tile packets to the frame */
packets = gst_adapter_take_list (rtpj2kdepay->t_adapter, avail);
for (walk = packets; walk; walk = g_list_next (walk)) {
- GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+ buf = GST_BUFFER_CAST (walk->data);
if (walk == packets) {
- guint8 *data;
- guint size;
-
/* first buffer should contain the SOT */
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
- if (size < 12)
+ if (map.size < 12)
goto invalid_tile;
- if (data[0] == 0xff && data[1] == J2K_MARKER_SOT) {
+ if (map.data[0] == 0xff && map.data[1] == J2K_MARKER_SOT) {
guint Psot, nPsot;
if (end[0] == 0xff && end[1] == J2K_MARKER_EOC)
else
nPsot = avail;
- Psot = GST_READ_UINT32_BE (&data[6]);
+ Psot = GST_READ_UINT32_BE (&map.data[6]);
if (Psot != nPsot && Psot != 0) {
/* Psot must match the size of the tile */
GST_DEBUG_OBJECT (rtpj2kdepay, "set Psot from %u to %u", Psot, nPsot);
+ gst_buffer_unmap (buf, &map);
+
buf = gst_buffer_make_writable (buf);
- data = GST_BUFFER_DATA (buf);
- GST_WRITE_UINT32_BE (&data[6], nPsot);
+
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ GST_WRITE_UINT32_BE (&map.data[6], nPsot);
}
}
+ gst_buffer_unmap (buf, &map);
}
- GST_DEBUG_OBJECT (rtpj2kdepay, "append pu packet of size %u",
- GST_BUFFER_SIZE (buf));
+ GST_DEBUG_OBJECT (rtpj2kdepay, "append pu packet of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buf));
gst_adapter_push (rtpj2kdepay->f_adapter, buf);
}
g_list_free (packets);
invalid_tile:
{
GST_ELEMENT_WARNING (rtpj2kdepay, STREAM, DECODE, ("Invalid tile"), (NULL));
+ gst_buffer_unmap (buf, &map);
gst_adapter_clear (rtpj2kdepay->t_adapter);
rtpj2kdepay->last_tile = -1;
return ret;
}
static GstFlowReturn
-gst_rtp_j2k_depay_flush_frame (GstBaseRTPDepayload * depayload)
+gst_rtp_j2k_depay_flush_frame (GstRTPBaseDepayload * depayload)
{
GstRtpJ2KDepay *rtpj2kdepay;
guint8 end[2];
- guint8 *data;
guint avail;
GstFlowReturn ret = GST_FLOW_OK;
gst_adapter_copy (rtpj2kdepay->f_adapter, end, avail - 2, 2);
if (end[0] != 0xff && end[1] != 0xd9) {
+ end[0] = 0xff;
+ end[1] = 0xd9;
+
GST_DEBUG_OBJECT (rtpj2kdepay, "no EOC marker, adding one");
/* no EOI marker, add one */
outbuf = gst_buffer_new_and_alloc (2);
- data = GST_BUFFER_DATA (outbuf);
- data[0] = 0xff;
- data[1] = 0xd9;
+ gst_buffer_fill (outbuf, 0, end, 2);
gst_adapter_push (rtpj2kdepay->f_adapter, outbuf);
avail += 2;
}
-
+#if 0
if (rtpj2kdepay->buffer_list) {
GList *list;
GstBufferList *buflist;
gst_buffer_list_iterator_add_list (it, list);
gst_buffer_list_iterator_free (it);
- ret = gst_base_rtp_depayload_push_list (depayload, buflist);
- } else {
+ ret = gst_rtp_base_depayload_push_list (depayload, buflist);
+ } else
+#endif
+ {
GST_DEBUG_OBJECT (rtpj2kdepay, "pushing buffer of %u bytes", avail);
outbuf = gst_adapter_take_buffer (rtpj2kdepay->f_adapter, avail);
- ret = gst_base_rtp_depayload_push (depayload, outbuf);
+ ret = gst_rtp_base_depayload_push (depayload, outbuf);
}
} else {
GST_WARNING_OBJECT (rtpj2kdepay, "empty packet");
}
static GstBuffer *
-gst_rtp_j2k_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_j2k_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpJ2KDepay *rtpj2kdepay;
guint8 *payload;
guint MHF, mh_id, frag_offset, tile, payload_len, j2klen;
gint gap;
guint32 rtptime;
+ GstRTPBuffer rtp = { NULL };
rtpj2kdepay = GST_RTP_J2K_DEPAY (depayload);
- payload = gst_rtp_buffer_get_payload (buf);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* we need at least a header */
if (payload_len < 8)
goto empty_packet;
- rtptime = gst_rtp_buffer_get_timestamp (buf);
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
/* new timestamp marks new frame */
if (rtpj2kdepay->last_rtptime != rtptime) {
}
/* and push in pu adapter */
GST_DEBUG_OBJECT (rtpj2kdepay, "push pu of size %u in adapter", j2klen);
- pu_frag = gst_rtp_buffer_get_payload_subbuffer (buf, 8, -1);
+ pu_frag = gst_rtp_buffer_get_payload_subbuffer (&rtp, 8, -1);
gst_adapter_push (rtpj2kdepay->pu_adapter, pu_frag);
if (MHF & 2) {
}
/* marker bit finishes the frame */
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
GST_DEBUG_OBJECT (rtpj2kdepay, "marker set, last buffer");
/* then flush frame */
gst_rtp_j2k_depay_flush_frame (depayload);
}
+ gst_rtp_buffer_unmap (&rtp);
+
return NULL;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpj2kdepay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
wrong_mh_id:
("Invalid mh_id %u, expected %u", mh_id, rtpj2kdepay->last_mh_id),
(NULL));
gst_rtp_j2k_depay_clear_pu (rtpj2kdepay);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpJ2KDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
guint64 last_rtptime;
guint last_mh_id;
struct _GstRtpJ2KDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_j2k_depay_get_type (void);
static void gst_rtp_j2k_pay_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static gboolean gst_rtp_j2k_pay_setcaps (GstBaseRTPPayload * basepayload,
+static gboolean gst_rtp_j2k_pay_setcaps (GstRTPBasePayload * basepayload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_j2k_pay_handle_buffer (GstBaseRTPPayload * pad,
+static GstFlowReturn gst_rtp_j2k_pay_handle_buffer (GstRTPBasePayload * pad,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpJ2KPay, gst_rtp_j2k_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_j2k_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_j2k_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_j2k_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP JPEG 2000 payloader", "Codec/Payloader/Network/RTP",
- "Payload-encodes JPEG 2000 pictures into RTP packets (RFC 5371)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_j2k_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJ2KPay, gst_rtp_j2k_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_j2k_pay_class_init (GstRtpJ2KPayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->set_property = gst_rtp_j2k_pay_set_property;
gobject_class->get_property = gst_rtp_j2k_pay_get_property;
"Use Buffer Lists",
DEFAULT_BUFFER_LIST, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gstbasertppayload_class->set_caps = gst_rtp_j2k_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_j2k_pay_handle_buffer;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_j2k_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_j2k_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP JPEG 2000 payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes JPEG 2000 pictures into RTP packets (RFC 5371)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_j2k_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_j2k_pay_handle_buffer;
GST_DEBUG_CATEGORY_INIT (rtpj2kpay_debug, "rtpj2kpay", 0,
"JPEG 2000 RTP Payloader");
}
static void
-gst_rtp_j2k_pay_init (GstRtpJ2KPay * pay, GstRtpJ2KPayClass * klass)
+gst_rtp_j2k_pay_init (GstRtpJ2KPay * pay)
{
pay->buffer_list = DEFAULT_BUFFER_LIST;
}
static gboolean
-gst_rtp_j2k_pay_setcaps (GstBaseRTPPayload * basepayload, GstCaps * caps)
+gst_rtp_j2k_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
{
GstStructure *caps_structure = gst_caps_get_structure (caps, 0);
GstRtpJ2KPay *pay;
pay->width = width;
}
- gst_basertppayload_set_options (basepayload, "video", TRUE, "JPEG2000",
+ gst_rtp_base_payload_set_options (basepayload, "video", TRUE, "JPEG2000",
90000);
- res = gst_basertppayload_set_outcaps (basepayload, NULL);
+ res = gst_rtp_base_payload_set_outcaps (basepayload, NULL);
return res;
}
}
static GstFlowReturn
-gst_rtp_j2k_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_j2k_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpJ2KPay *pay;
GstClockTime timestamp;
GstFlowReturn ret = GST_FLOW_ERROR;
RtpJ2KState state;
+#if 0
GstBufferList *list = NULL;
- GstBufferListIterator *it = NULL;
- guint8 *data;
- guint size;
+#endif
+ GstMapInfo map;
guint mtu, max_size;
guint offset;
guint end, pos;
pay = GST_RTP_J2K_PAY (basepayload);
- mtu = GST_BASE_RTP_PAYLOAD_MTU (pay);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (pay);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
offset = pos = end = 0;
- GST_LOG_OBJECT (pay, "got buffer size %u, timestamp %" GST_TIME_FORMAT, size,
- GST_TIME_ARGS (timestamp));
+ GST_LOG_OBJECT (pay,
+ "got buffer size %" G_GSIZE_FORMAT ", timestamp %" GST_TIME_FORMAT,
+ map.size, GST_TIME_ARGS (timestamp));
/* do some header defaults first */
state.header.tp = 0; /* only progressive scan */
state.next_sot = 0;
state.force_packet = FALSE;
+#if 0
if (pay->buffer_list) {
list = gst_buffer_list_new ();
it = gst_buffer_list_iterate (list);
}
+#endif
/* get max packet length */
max_size = gst_rtp_buffer_calc_payload_len (mtu - HEADER_SIZE, 0, 0);
guint8 *header;
guint payload_size;
guint pu_size;
+ GstRTPBuffer rtp = { NULL };
/* try to pack as much as we can */
do {
pos = end;
/* exit when finished */
- if (pos == size)
+ if (pos == map.size)
break;
/* scan next packetization unit and fill in the header */
- end = find_pu_end (pay, data, size, pos, &state);
+ end = find_pu_end (pay, map.data, map.size, pos, &state);
} while (TRUE);
while (pu_size > 0) {
payload_size = gst_rtp_buffer_calc_payload_len (packet_size, 0, 0);
data_size = payload_size - HEADER_SIZE;
+#if 0
if (pay->buffer_list) {
/* make buffer for header */
outbuf = gst_rtp_buffer_new_allocate (HEADER_SIZE, 0, 0);
- } else {
+ } else
+#endif
+ {
/* make buffer for header and data */
outbuf = gst_rtp_buffer_new_allocate (payload_size, 0, 0);
}
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* get pointer to header */
- header = gst_rtp_buffer_get_payload (outbuf);
+ header = gst_rtp_buffer_get_payload (&rtp);
pu_size -= data_size;
if (pu_size == 0) {
else
state.header.MHF = 2;
}
- if (end >= size)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ if (end >= map.size)
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
}
/*
header[6] = (state.header.offset >> 8) & 0xff;
header[7] = state.header.offset & 0xff;
+#if 0
if (pay->buffer_list) {
GstBuffer *paybuf;
/* add both buffers to the buffer list */
gst_buffer_list_iterator_add (it, outbuf);
gst_buffer_list_iterator_add (it, paybuf);
- } else {
+ } else
+#endif
+ {
/* copy payload */
- memcpy (header + HEADER_SIZE, &data[offset], data_size);
+ memcpy (header + HEADER_SIZE, &map.data[offset], data_size);
+ gst_rtp_buffer_unmap (&rtp);
- ret = gst_basertppayload_push (basepayload, outbuf);
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
if (ret != GST_FLOW_OK)
goto done;
}
offset += data_size;
}
offset = pos;
- } while (offset < size);
+ } while (offset < map.size);
done:
gst_buffer_unref (buffer);
+#if 0
if (pay->buffer_list) {
/* free iterator and push the whole buffer list at once */
gst_buffer_list_iterator_free (it);
- ret = gst_basertppayload_push_list (basepayload, list);
+ ret = gst_rtp_base_payload_push_list (basepayload, list);
}
+#endif
return ret;
}
#define __GST_RTP_J2K_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
G_BEGIN_DECLS
struct _GstRtpJ2KPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
gboolean buffer_list;
struct _GstRtpJ2KPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_j2k_pay_get_type (void);
)
);
-GST_BOILERPLATE (GstRtpJPEGDepay, gst_rtp_jpeg_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_jpeg_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJPEGDepay, gst_rtp_jpeg_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_jpeg_depay_finalize (GObject * object);
static GstStateChangeReturn gst_rtp_jpeg_depay_change_state (GstElement *
element, GstStateChange transition);
-static gboolean gst_rtp_jpeg_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_jpeg_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_jpeg_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_jpeg_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_jpeg_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_jpeg_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_jpeg_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP JPEG depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts JPEG video from RTP packets (RFC 2435)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_jpeg_depay_class_init (GstRtpJPEGDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_jpeg_depay_finalize;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jpeg_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jpeg_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP JPEG depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts JPEG video from RTP packets (RFC 2435)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstelement_class->change_state = gst_rtp_jpeg_depay_change_state;
- gstbasertpdepayload_class->set_caps = gst_rtp_jpeg_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_jpeg_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_jpeg_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_jpeg_depay_process;
GST_DEBUG_CATEGORY_INIT (rtpjpegdepay_debug, "rtpjpegdepay", 0,
"JPEG Video RTP Depayloader");
}
static void
-gst_rtp_jpeg_depay_init (GstRtpJPEGDepay * rtpjpegdepay,
- GstRtpJPEGDepayClass * klass)
+gst_rtp_jpeg_depay_init (GstRtpJPEGDepay * rtpjpegdepay)
{
rtpjpegdepay->adapter = gst_adapter_new ();
}
};
static gboolean
-gst_rtp_jpeg_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_jpeg_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstRtpJPEGDepay *rtpjpegdepay;
GstStructure *structure;
}
static GstBuffer *
-gst_rtp_jpeg_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_jpeg_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpJPEGDepay *rtpjpegdepay;
GstBuffer *outbuf;
guint type, width, height;
guint16 dri, precision, length;
guint8 *qtable;
+ GstRTPBuffer rtp = { NULL };
rtpjpegdepay = GST_RTP_JPEG_DEPAY (depayload);
rtpjpegdepay->discont = TRUE;
}
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 8)
goto empty_packet;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
header_len = 0;
/* 0 1 2 3
}
if (frag_offset == 0) {
+ GstMapInfo map;
guint size;
if (rtpjpegdepay->width != width || rtpjpegdepay->height != height) {
}
/* max header length, should be big enough */
outbuf = gst_buffer_new_and_alloc (1000);
- size = MakeHeaders (GST_BUFFER_DATA (outbuf), type,
- width, height, qtable, precision, dri);
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ size = MakeHeaders (map.data, type, width, height, qtable, precision, dri);
+ gst_buffer_unmap (outbuf, &map);
+ gst_buffer_resize (outbuf, 0, size);
GST_DEBUG_OBJECT (rtpjpegdepay, "pushing %u bytes of header", size);
- GST_BUFFER_SIZE (outbuf) = size;
-
gst_adapter_push (rtpjpegdepay->adapter, outbuf);
}
/* take JPEG data, push in the adapter */
GST_DEBUG_OBJECT (rtpjpegdepay, "pushing data at offset %d", header_len);
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, header_len, -1);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, header_len, -1);
gst_adapter_push (rtpjpegdepay->adapter, outbuf);
outbuf = NULL;
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
guint8 end[2];
- guint8 *data;
+ GstMapInfo map;
/* last buffer take all data out of the adapter */
avail = gst_adapter_available (rtpjpegdepay->adapter);
/* no EOI marker, add one */
outbuf = gst_buffer_new_and_alloc (2);
- data = GST_BUFFER_DATA (outbuf);
- data[0] = 0xff;
- data[1] = 0xd9;
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ map.data[0] = 0xff;
+ map.data[1] = 0xd9;
+ gst_buffer_unmap (outbuf, &map);
gst_adapter_push (rtpjpegdepay->adapter, outbuf);
avail += 2;
GST_DEBUG_OBJECT (rtpjpegdepay, "returning %u bytes", avail);
}
+ gst_rtp_buffer_unmap (&rtp);
+
return outbuf;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_dimension:
{
GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, FORMAT,
("Invalid Dimension %dx%d.", width, height), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
no_qtable:
{
GST_WARNING_OBJECT (rtpjpegdepay, "no qtable");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpJPEGDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstAdapter *adapter;
gboolean discont;
struct _GstRtpJPEGDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_jpeg_depay_get_type (void);
static void gst_rtp_jpeg_pay_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static gboolean gst_rtp_jpeg_pay_setcaps (GstBaseRTPPayload * basepayload,
+static gboolean gst_rtp_jpeg_pay_setcaps (GstRTPBasePayload * basepayload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * pad,
+static GstFlowReturn gst_rtp_jpeg_pay_handle_buffer (GstRTPBasePayload * pad,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpJPEGPay, gst_rtp_jpeg_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_jpeg_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_jpeg_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_jpeg_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP JPEG payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes JPEG pictures into RTP packets (RFC 2435)",
- "Axis Communications <dev-gstreamer@axis.com>");
-}
+#define gst_rtp_jpeg_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJPEGPay, gst_rtp_jpeg_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_jpeg_pay_class_init (GstRtpJPEGPayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->set_property = gst_rtp_jpeg_pay_set_property;
gobject_class->get_property = gst_rtp_jpeg_pay_get_property;
- gstbasertppayload_class->set_caps = gst_rtp_jpeg_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_jpeg_pay_handle_buffer;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jpeg_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jpeg_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP JPEG payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes JPEG pictures into RTP packets (RFC 2435)",
+ "Axis Communications <dev-gstreamer@axis.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_jpeg_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_jpeg_pay_handle_buffer;
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_JPEG_QUALITY,
g_param_spec_int ("quality", "Quality",
}
static void
-gst_rtp_jpeg_pay_init (GstRtpJPEGPay * pay, GstRtpJPEGPayClass * klass)
+gst_rtp_jpeg_pay_init (GstRtpJPEGPay * pay)
{
pay->quality = DEFAULT_JPEG_QUALITY;
pay->quant = DEFAULT_JPEG_QUANT;
}
static gboolean
-gst_rtp_jpeg_pay_setcaps (GstBaseRTPPayload * basepayload, GstCaps * caps)
+gst_rtp_jpeg_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
{
GstStructure *caps_structure = gst_caps_get_structure (caps, 0);
GstRtpJPEGPay *pay;
}
pay->width = GST_ROUND_UP_8 (width) / 8;
- gst_basertppayload_set_options (basepayload, "video", TRUE, "JPEG", 90000);
- res = gst_basertppayload_set_outcaps (basepayload, NULL);
+ gst_rtp_base_payload_set_options (basepayload, "video", TRUE, "JPEG", 90000);
+ res = gst_rtp_base_payload_set_outcaps (basepayload, NULL);
return res;
}
static GstFlowReturn
-gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_jpeg_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpJPEGPay *pay;
RtpQuantTable tables[15] = { {0, NULL}, };
CompInfo info[3] = { {0,}, };
guint quant_data_size;
+ GstMapInfo map;
guint8 *data;
- guint size;
+ gsize size;
guint mtu;
guint bytes_left;
guint jpeg_header_size = 0;
gboolean sos_found, sof_found, dqt_found, dri_found;
gint i;
GstBufferList *list = NULL;
- GstBufferListIterator *it = NULL;
pay = GST_RTP_JPEG_PAY (basepayload);
- mtu = GST_BASE_RTP_PAYLOAD_MTU (pay);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (pay);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
timestamp = GST_BUFFER_TIMESTAMP (buffer);
offset = 0;
- GST_LOG_OBJECT (pay, "got buffer size %u, timestamp %" GST_TIME_FORMAT, size,
- GST_TIME_ARGS (timestamp));
+ GST_LOG_OBJECT (pay, "got buffer size %" G_GSIZE_FORMAT
+ " , timestamp %" GST_TIME_FORMAT, size, GST_TIME_ARGS (timestamp));
/* parse the jpeg header for 'start of scan' and read quant tables if needed */
sos_found = FALSE;
if (pay->buffer_list) {
list = gst_buffer_list_new ();
- it = gst_buffer_list_iterate (list);
}
bytes_left = sizeof (jpeg_header) + quant_data_size + size;
GstBuffer *outbuf;
guint8 *payload;
guint payload_size = (bytes_left < mtu ? bytes_left : mtu);
+ guint header_size;
+ GstBuffer *paybuf;
+ GstRTPBuffer rtp = { NULL };
- if (pay->buffer_list) {
- guint header_size;
+ header_size = sizeof (jpeg_header) + quant_data_size;
+ if (dri_found)
+ header_size += sizeof (restart_marker_header);
- header_size = sizeof (jpeg_header) + quant_data_size;
- if (dri_found)
- header_size += sizeof (restart_marker_header);
+ outbuf = gst_rtp_buffer_new_allocate (header_size, 0, 0);
- outbuf = gst_rtp_buffer_new_allocate (header_size, 0, 0);
- } else {
- outbuf = gst_rtp_buffer_new_allocate (payload_size, 0, 0);
- }
- GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
if (payload_size == bytes_left) {
GST_LOG_OBJECT (pay, "last packet of frame");
frame_done = TRUE;
- gst_rtp_buffer_set_marker (outbuf, 1);
+ gst_rtp_buffer_set_marker (&rtp, 1);
}
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* update offset */
#if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
quant_data_size = 0;
}
GST_LOG_OBJECT (pay, "sending payload size %d", payload_size);
+ gst_rtp_buffer_unmap (&rtp);
- if (pay->buffer_list) {
- GstBuffer *paybuf;
+ /* create a new buf to hold the payload */
+ paybuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
+ jpeg_header_size + offset, payload_size);
- /* create a new buf to hold the payload */
- paybuf = gst_buffer_create_sub (buffer, jpeg_header_size + offset,
- payload_size);
+ /* join memory parts */
+ outbuf = gst_buffer_join (outbuf, paybuf);
- /* create a new group to hold the rtp header and the payload */
- gst_buffer_list_iterator_add_group (it);
- gst_buffer_list_iterator_add (it, outbuf);
- gst_buffer_list_iterator_add (it, paybuf);
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+
+ if (pay->buffer_list) {
+ /* and add to list */
+ gst_buffer_list_insert (list, -1, outbuf);
} else {
- memcpy (payload, data, payload_size);
- ret = gst_basertppayload_push (basepayload, outbuf);
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
if (ret != GST_FLOW_OK)
break;
}
while (!frame_done);
if (pay->buffer_list) {
- gst_buffer_list_iterator_free (it);
/* push the whole buffer list at once */
- ret = gst_basertppayload_push_list (basepayload, list);
+ ret = gst_rtp_base_payload_push_list (basepayload, list);
}
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return ret;
unsupported_jpeg:
{
GST_ELEMENT_ERROR (pay, STREAM, FORMAT, ("Unsupported JPEG"), (NULL));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_SUPPORTED;
}
no_dimension:
{
GST_ELEMENT_ERROR (pay, STREAM, FORMAT, ("No size given"), (NULL));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_NEGOTIATED;
}
invalid_format:
{
/* error was posted */
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
invalid_quant:
{
GST_ELEMENT_ERROR (pay, STREAM, FORMAT, ("Invalid quant tables"), (NULL));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
#define __GST_RTP_JPEG_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
G_BEGIN_DECLS
#define GST_TYPE_RTP_JPEG_PAY \
struct _GstRtpJPEGPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
guint8 quality;
guint8 type;
struct _GstRtpJPEGPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_jpeg_pay_get_type (void);
"clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"MP1S\"")
);
-GST_BOILERPLATE (GstRtpMP1SDepay, gst_rtp_mp1s_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+G_DEFINE_TYPE (GstRtpMP1SDepay, gst_rtp_mp1s_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_mp1s_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_mp1s_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_mp1s_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_mp1s_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_mp1s_depay_base_init (gpointer klass)
+gst_rtp_mp1s_depay_class_init (GstRtpMP1SDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gstrtpbasedepayload_class->process = gst_rtp_mp1s_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mp1s_depay_setcaps;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp1s_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp1s_depay_sink_template);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp1s_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp1s_depay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG1 System Stream depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG1 System Streams from RTP packets (RFC 3555)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_rtp_mp1s_depay_class_init (GstRtpMP1SDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
- gstbasertpdepayload_class->process = gst_rtp_mp1s_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_mp1s_depay_setcaps;
-}
-
-static void
-gst_rtp_mp1s_depay_init (GstRtpMP1SDepay * rtpmp1sdepay,
- GstRtpMP1SDepayClass * klass)
+gst_rtp_mp1s_depay_init (GstRtpMP1SDepay * rtpmp1sdepay)
{
}
static gboolean
-gst_rtp_mp1s_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_mp1s_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstCaps *srccaps;
GstStructure *structure;
srccaps = gst_caps_new_simple ("video/mpeg",
"systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
- res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
return res;
}
static GstBuffer *
-gst_rtp_mp1s_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_mp1s_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf)
- GST_DEBUG ("gst_rtp_mp1s_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ GST_DEBUG ("gst_rtp_mp1s_depay_chain: pushing buffer of size %"
+ G_GSIZE_FORMAT, gst_buffer_get_size (outbuf));
return outbuf;
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpMP1SDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
};
struct _GstRtpMP1SDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_mp1s_depay_get_type (void);
GST_STATIC_CAPS ("application/x-rtp, "
"media = (string) \"video\", "
"payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
- "clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"MP2T-ES\";"
+ "clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"MP2T\";"
/* All optional parameters
*
* "profile-level-id=[1,MAX]"
"clock-rate = (int) [1, MAX ]")
);
-GST_BOILERPLATE (GstRtpMP2TDepay, gst_rtp_mp2t_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+G_DEFINE_TYPE (GstRtpMP2TDepay, gst_rtp_mp2t_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_mp2t_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_mp2t_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_mp2t_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_mp2t_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void gst_rtp_mp2t_depay_set_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void
-gst_rtp_mp2t_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp2t_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp2t_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG Transport Stream depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG2 TS from RTP packets (RFC 2250)",
- "Wim Taymans <wim.taymans@gmail.com>, "
- "Thijs Vermeir <thijs.vermeir@barco.com>");
-}
-
-static void
gst_rtp_mp2t_depay_class_init (GstRtpMP2TDepayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
- gstbasertpdepayload_class->process = gst_rtp_mp2t_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_mp2t_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_mp2t_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mp2t_depay_setcaps;
gobject_class->set_property = gst_rtp_mp2t_depay_set_property;
gobject_class->get_property = gst_rtp_mp2t_depay_get_property;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp2t_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp2t_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG Transport Stream depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG2 TS from RTP packets (RFC 2250)",
+ "Wim Taymans <wim.taymans@gmail.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>");
+
g_object_class_install_property (gobject_class, PROP_SKIP_FIRST_BYTES,
g_param_spec_uint ("skip-first-bytes",
"Skip first bytes",
}
static void
-gst_rtp_mp2t_depay_init (GstRtpMP2TDepay * rtpmp2tdepay,
- GstRtpMP2TDepayClass * klass)
+gst_rtp_mp2t_depay_init (GstRtpMP2TDepay * rtpmp2tdepay)
{
rtpmp2tdepay->skip_first_bytes = DEFAULT_SKIP_FIRST_BYTES;
}
static gboolean
-gst_rtp_mp2t_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_mp2t_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstCaps *srccaps;
GstStructure *structure;
srccaps = gst_caps_new_simple ("video/mpegts",
"packetsize", G_TYPE_INT, 188,
"systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
- res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
return res;
}
static GstBuffer *
-gst_rtp_mp2t_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_mp2t_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpMP2TDepay *rtpmp2tdepay;
GstBuffer *outbuf;
gint payload_len;
+ GstRTPBuffer rtp = { NULL };
rtpmp2tdepay = GST_RTP_MP2T_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (G_UNLIKELY (payload_len <= rtpmp2tdepay->skip_first_bytes))
goto empty_packet;
- outbuf =
- gst_rtp_buffer_get_payload_subbuffer (buf, rtpmp2tdepay->skip_first_bytes,
- -1);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp,
+ rtpmp2tdepay->skip_first_bytes, -1);
+
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf)
- GST_DEBUG ("gst_rtp_mp2t_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ GST_DEBUG ("gst_rtp_mp2t_depay_chain: pushing buffer of size %"
+ G_GSIZE_FORMAT, gst_buffer_get_size (outbuf));
return outbuf;
{
GST_ELEMENT_WARNING (rtpmp2tdepay, STREAM, DECODE,
(NULL), ("Packet was empty"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpMP2TDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
guint8 skip_first_bytes;
};
struct _GstRtpMP2TDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_mp2t_depay_get_type (void);
GST_STATIC_CAPS ("application/x-rtp, "
"media = (string) \"video\", "
"payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
- "clock-rate = (int) 90000, " "encoding-name = (string) \"MP2T-ES\"")
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"MP2T\"")
);
-static gboolean gst_rtp_mp2t_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_mp2t_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_mp2t_pay_handle_buffer (GstBaseRTPPayload *
+static GstFlowReturn gst_rtp_mp2t_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
static GstFlowReturn gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay);
static void gst_rtp_mp2t_pay_finalize (GObject * object);
-GST_BOILERPLATE (GstRTPMP2TPay, gst_rtp_mp2t_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_mp2t_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp2t_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp2t_pay_src_template);
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG2 Transport Stream payloader", "Codec/Payloader/Network/RTP",
- "Payload-encodes MPEG2 TS into RTP packets (RFC 2250)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_mp2t_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPMP2TPay, gst_rtp_mp2t_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_mp2t_pay_class_init (GstRTPMP2TPayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->finalize = gst_rtp_mp2t_pay_finalize;
- gstbasertppayload_class->set_caps = gst_rtp_mp2t_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_mp2t_pay_handle_buffer;
+ gstrtpbasepayload_class->set_caps = gst_rtp_mp2t_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_mp2t_pay_handle_buffer;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp2t_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp2t_pay_src_template));
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG2 Transport Stream payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes MPEG2 TS into RTP packets (RFC 2250)",
+ "Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_rtp_mp2t_pay_init (GstRTPMP2TPay * rtpmp2tpay, GstRTPMP2TPayClass * klass)
+gst_rtp_mp2t_pay_init (GstRTPMP2TPay * rtpmp2tpay)
{
- GST_BASE_RTP_PAYLOAD (rtpmp2tpay)->clock_rate = 90000;
- GST_BASE_RTP_PAYLOAD_PT (rtpmp2tpay) = GST_RTP_PAYLOAD_MP2T;
+ GST_RTP_BASE_PAYLOAD (rtpmp2tpay)->clock_rate = 90000;
+ GST_RTP_BASE_PAYLOAD_PT (rtpmp2tpay) = GST_RTP_PAYLOAD_MP2T;
rtpmp2tpay->adapter = gst_adapter_new ();
}
}
static gboolean
-gst_rtp_mp2t_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_mp2t_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
- gst_basertppayload_set_options (payload, "video", TRUE, "MP2T-ES", 90000);
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ gst_rtp_base_payload_set_options (payload, "video", TRUE, "MP2T", 90000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
return res;
}
guint8 *payload;
GstFlowReturn ret;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
avail = gst_adapter_available (rtpmp2tpay->adapter);
if (avail == 0)
outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);
/* get payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* copy stuff from adapter to payload */
gst_adapter_copy (rtpmp2tpay->adapter, payload, 0, avail);
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmp2tpay->first_ts;
GST_BUFFER_DURATION (outbuf) = rtpmp2tpay->duration;
- GST_DEBUG_OBJECT (rtpmp2tpay, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ GST_DEBUG_OBJECT (rtpmp2tpay, "pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
- ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp2tpay), outbuf);
+ ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmp2tpay), outbuf);
/* flush the adapter content */
gst_adapter_flush (rtpmp2tpay->adapter, avail);
}
static GstFlowReturn
-gst_rtp_mp2t_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_mp2t_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRTPMP2TPay *rtpmp2tpay;
rtpmp2tpay = GST_RTP_MP2T_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
/* if this buffer is going to overflow the packet, flush what we
* have. */
- if (gst_basertppayload_is_filled (basepayload,
+ if (gst_rtp_base_payload_is_filled (basepayload,
packet_len, rtpmp2tpay->duration + duration)) {
ret = gst_rtp_mp2t_pay_flush (rtpmp2tpay);
rtpmp2tpay->first_ts = timestamp;
#define __GST_RTP_MP2T_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRTPMP2TPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstAdapter *adapter;
GstClockTime first_ts;
struct _GstRTPMP2TPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_mp2t_pay_get_type (void);
)
);
-GST_BOILERPLATE (GstRtpMP4ADepay, gst_rtp_mp4a_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_mp4a_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4ADepay, gst_rtp_mp4a_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_mp4a_depay_finalize (GObject * object);
-static gboolean gst_rtp_mp4a_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_mp4a_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_mp4a_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_mp4a_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static GstStateChangeReturn gst_rtp_mp4a_depay_change_state (GstElement *
static void
-gst_rtp_mp4a_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4a_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4a_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG4 audio depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG4 audio from RTP packets (RFC 3016)",
- "Nokia Corporation (contact <stefan.kost@nokia.com>), "
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_mp4a_depay_class_init (GstRtpMP4ADepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_mp4a_depay_finalize;
gstelement_class->change_state = gst_rtp_mp4a_depay_change_state;
- gstbasertpdepayload_class->process = gst_rtp_mp4a_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_mp4a_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_mp4a_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mp4a_depay_setcaps;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4a_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4a_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG4 audio from RTP packets (RFC 3016)",
+ "Nokia Corporation (contact <stefan.kost@nokia.com>), "
+ "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpmp4adepay_debug, "rtpmp4adepay", 0,
"MPEG4 audio RTP Depayloader");
}
static void
-gst_rtp_mp4a_depay_init (GstRtpMP4ADepay * rtpmp4adepay,
- GstRtpMP4ADepayClass * klass)
+gst_rtp_mp4a_depay_init (GstRtpMP4ADepay * rtpmp4adepay)
{
rtpmp4adepay->adapter = gst_adapter_new ();
}
};
static gboolean
-gst_rtp_mp4a_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_mp4a_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRtpMP4ADepay *rtpmp4adepay;
g_value_init (&v, GST_TYPE_BUFFER);
if (gst_value_deserialize (&v, str)) {
GstBuffer *buffer;
+ GstMapInfo map;
guint8 *data;
- guint size;
+ gsize size;
gint i;
guint32 rate = 0;
guint8 obj_type = 0, sr_idx = 0, channels = 0;
gst_buffer_ref (buffer);
g_value_unset (&v);
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
if (size < 2) {
- GST_WARNING_OBJECT (depayload, "config too short (%d < 2)", size);
+ GST_WARNING_OBJECT (depayload, "config too short (%d < 2)",
+ (gint) size);
goto bad_config;
}
for (i = 0; i < size; i++) {
data[i] = ((data[i + 1] & 1) << 7) | ((data[i + 2] & 0xfe) >> 1);
}
- /* ignore remaining bit, we're only interested in full bytes */
- GST_BUFFER_SIZE (buffer) = size;
gst_bit_reader_init (&br, data, size);
break;
}
+ /* ignore remaining bit, we're only interested in full bytes */
+ gst_buffer_resize (buffer, 0, size);
+ gst_buffer_unmap (buffer, &map);
+ data = NULL;
+
gst_caps_set_simple (srccaps,
"channels", G_TYPE_INT, (gint) channels,
"rate", G_TYPE_INT, (gint) rate,
"codec_data", GST_TYPE_BUFFER, buffer, NULL);
+ bad_config:
+ if (data)
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
} else {
g_warning ("cannot convert config to buffer");
}
}
-bad_config:
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
}
static GstBuffer *
-gst_rtp_mp4a_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_mp4a_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpMP4ADepay *rtpmp4adepay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
+ GstMapInfo map;
rtpmp4adepay = GST_RTP_MP4A_DEPAY (depayload);
gst_adapter_clear (rtpmp4adepay->adapter);
}
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
- gst_buffer_copy_metadata (outbuf, buf, GST_BUFFER_COPY_TIMESTAMPS);
+ outbuf = gst_buffer_make_writable (outbuf);
+ GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
gst_adapter_push (rtpmp4adepay->adapter, outbuf);
/* RTP marker bit indicates the last packet of the AudioMuxElement => create
* and push a buffer */
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
guint i;
guint8 *data;
GST_LOG_OBJECT (rtpmp4adepay, "have marker and %u available", avail);
outbuf = gst_adapter_take_buffer (rtpmp4adepay->adapter, avail);
- data = GST_BUFFER_DATA (outbuf);
+ gst_buffer_map (outbuf, &map, GST_MAP_READ);
+ data = map.data;
/* position in data we are at */
pos = 0;
/* take data out, skip the header */
pos += skip;
- tmp = gst_buffer_create_sub (outbuf, pos, data_len);
+ tmp = gst_buffer_copy_region (outbuf, GST_BUFFER_COPY_MEMORY, pos,
+ data_len);
/* skip data too */
skip += data_len;
avail -= skip;
GST_BUFFER_TIMESTAMP (tmp) = timestamp;
- gst_base_rtp_depayload_push (depayload, tmp);
+ gst_rtp_base_depayload_push (depayload, tmp);
/* shift ts for next buffers */
if (rtpmp4adepay->frame_len && timestamp != -1
"possible wrongly encoded packet."));
}
+ gst_buffer_unmap (outbuf, &map);
gst_buffer_unref (outbuf);
}
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpmp4adepay, STREAM, DECODE,
("Packet did not validate"), ("wrong packet size"));
+ gst_buffer_unmap (outbuf, &map);
gst_buffer_unref (outbuf);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpMP4ADepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstAdapter *adapter;
guint8 numSubFrames;
guint frame_len;
struct _GstRtpMP4ADepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_mp4a_depay_get_type (void);
static void gst_rtp_mp4a_pay_finalize (GObject * object);
-static gboolean gst_rtp_mp4a_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_mp4a_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_mp4a_pay_handle_buffer (GstBaseRTPPayload *
+static GstFlowReturn gst_rtp_mp4a_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpMP4APay, gst_rtp_mp4a_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
+#define gst_rtp_mp4a_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4APay, gst_rtp_mp4a_pay, GST_TYPE_RTP_BASE_PAYLOAD)
- static void gst_rtp_mp4a_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4a_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4a_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG4 audio payloader", "Codec/Payloader/Network/RTP",
- "Payload MPEG4 audio as RTP packets (RFC 3016)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mp4a_pay_class_init (GstRtpMP4APayClass * klass)
+ static void gst_rtp_mp4a_pay_class_init (GstRtpMP4APayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->finalize = gst_rtp_mp4a_pay_finalize;
- gstbasertppayload_class->set_caps = gst_rtp_mp4a_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_mp4a_pay_handle_buffer;
+ gstrtpbasepayload_class->set_caps = gst_rtp_mp4a_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_mp4a_pay_handle_buffer;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4a_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4a_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload MPEG4 audio as RTP packets (RFC 3016)",
+ "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpmp4apay_debug, "rtpmp4apay", 0,
"MP4A-LATM RTP Payloader");
}
static void
-gst_rtp_mp4a_pay_init (GstRtpMP4APay * rtpmp4apay, GstRtpMP4APayClass * klass)
+gst_rtp_mp4a_pay_init (GstRtpMP4APay * rtpmp4apay)
{
rtpmp4apay->rate = 90000;
rtpmp4apay->profile = g_strdup ("1");
gst_rtp_mp4a_pay_parse_audio_config (GstRtpMP4APay * rtpmp4apay,
GstBuffer * buffer)
{
+ GstMapInfo map;
guint8 *data;
- guint size;
+ gsize size;
guint8 objectType;
guint8 samplingIdx;
guint8 channelCfg;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
if (size < 2)
goto too_short;
"objectType: %d, samplingIdx: %d (%d), channelCfg: %d", objectType,
samplingIdx, rtpmp4apay->rate, channelCfg);
+ gst_buffer_unmap (buffer, &map);
+
return TRUE;
/* ERROR */
too_short:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, FORMAT,
- (NULL), ("config string too short, expected 2 bytes, got %d", size));
+ (NULL),
+ ("config string too short, expected 2 bytes, got %" G_GSIZE_FORMAT,
+ size));
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
invalid_object:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, FORMAT,
(NULL), ("invalid object type 0"));
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
wrong_freq:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported frequency index %d", samplingIdx));
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
wrong_channels:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported number of channels %d, must < 8", channelCfg));
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
}
gst_value_set_buffer (&v, rtpmp4apay->config);
config = gst_value_serialize (&v);
- res = gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpmp4apay),
+ res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpmp4apay),
"cpresent", G_TYPE_STRING, "0", "config", G_TYPE_STRING, config, NULL);
g_value_unset (&v);
}
static gboolean
-gst_rtp_mp4a_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_mp4a_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
GstRtpMP4APay *rtpmp4apay;
GstStructure *structure;
GST_LOG_OBJECT (rtpmp4apay, "got codec_data");
if (G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
GstBuffer *buffer, *cbuffer;
- guint8 *config;
- guint8 *data;
- guint size, i;
+ GstMapInfo map;
+ GstMapInfo cmap;
+ guint i;
buffer = gst_value_get_buffer (codec_data);
GST_LOG_OBJECT (rtpmp4apay, "configuring codec_data");
if (!res)
goto config_failed;
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
/* make the StreamMuxConfig, we need 15 bits for the header */
- config = g_malloc0 (size + 2);
+ cbuffer = gst_buffer_new_and_alloc (map.size + 2);
+ gst_buffer_map (cbuffer, &cmap, GST_MAP_WRITE);
/* Create StreamMuxConfig according to ISO/IEC 14496-3:
*
* numProgram == 0 (4 bits)
* numLayer == 0 (3 bits)
*/
- config[0] = 0x40;
- config[1] = 0x00;
+ cmap.data[0] = 0x40;
+ cmap.data[1] = 0x00;
/* append the config bits, shifting them 1 bit left */
- for (i = 0; i < size; i++) {
- config[i + 1] |= ((data[i] & 0x80) >> 7);
- config[i + 2] |= ((data[i] & 0x7f) << 1);
+ for (i = 0; i < map.size; i++) {
+ cmap.data[i + 1] |= ((map.data[i] & 0x80) >> 7);
+ cmap.data[i + 2] |= ((map.data[i] & 0x7f) << 1);
}
- cbuffer = gst_buffer_new ();
- GST_BUFFER_DATA (cbuffer) = config;
- GST_BUFFER_MALLOCDATA (cbuffer) = config;
- GST_BUFFER_SIZE (cbuffer) = size + 2;
+ gst_buffer_unmap (cbuffer, &cmap);
+ gst_buffer_unmap (buffer, &map);
/* now we can configure the buffer */
if (rtpmp4apay->config)
GST_WARNING_OBJECT (payload, "Need framed AAC data as input!");
}
- gst_basertppayload_set_options (payload, "audio", TRUE, "MP4A-LATM",
+ gst_rtp_base_payload_set_options (payload, "audio", TRUE, "MP4A-LATM",
rtpmp4apay->rate);
res = gst_rtp_mp4a_pay_new_caps (rtpmp4apay);
/* we expect buffers as exactly one complete AU
*/
static GstFlowReturn
-gst_rtp_mp4a_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_mp4a_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpMP4APay *rtpmp4apay;
GstFlowReturn ret;
GstBuffer *outbuf;
- guint count, mtu, size;
+ guint count, mtu;
+ GstMapInfo map;
+ gsize size;
guint8 *data;
gboolean fragmented;
GstClockTime timestamp;
rtpmp4apay = GST_RTP_MP4A_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ size = map.size;
+ data = map.data;
+
timestamp = GST_BUFFER_TIMESTAMP (buffer);
fragmented = FALSE;
- mtu = GST_BASE_RTP_PAYLOAD_MTU (rtpmp4apay);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpmp4apay);
while (size > 0) {
guint towrite;
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL };
/* this will be the total lenght of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (size, 0, 0);
payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
GST_DEBUG_OBJECT (rtpmp4apay,
- "avail %d, towrite %d, packet_len %d, payload_len %d", size, towrite,
- packet_len, payload_len);
+ "avail %" G_GSIZE_FORMAT ", towrite %d, packet_len %d, payload_len %d",
+ size, towrite, packet_len, payload_len);
/* create buffer to hold the payload. */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
/* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
if (!fragmented) {
/* first packet write the header */
size -= payload_len;
/* marker only if the packet is complete */
- gst_rtp_buffer_set_marker (outbuf, size == 0);
+ gst_rtp_buffer_set_marker (&rtp, size == 0);
+
+ gst_rtp_buffer_unmap (&rtp);
/* copy incomming timestamp (if any) to outgoing buffers */
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
- ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp4apay), outbuf);
+ ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmp4apay), outbuf);
fragmented = TRUE;
}
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return ret;
#define __GST_RTP_MP4A_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRtpMP4APay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
gint rate;
gchar *params;
struct _GstRtpMP4APayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_mp4a_pay_get_type (void);
}
-GST_BOILERPLATE (GstRtpMP4GDepay, gst_rtp_mp4g_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_mp4g_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4GDepay, gst_rtp_mp4g_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_mp4g_depay_finalize (GObject * object);
-static gboolean gst_rtp_mp4g_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_mp4g_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_mp4g_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_mp4g_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_mp4g_depay_handle_event (GstBaseRTPDepayload * filter,
+static gboolean gst_rtp_mp4g_depay_handle_event (GstRTPBaseDepayload * filter,
GstEvent * event);
static GstStateChangeReturn gst_rtp_mp4g_depay_change_state (GstElement *
static void
-gst_rtp_mp4g_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4g_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4g_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG4 ES depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG4 elementary streams from RTP packets (RFC 3640)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_mp4g_depay_class_init (GstRtpMP4GDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_mp4g_depay_finalize;
gstelement_class->change_state = gst_rtp_mp4g_depay_change_state;
- gstbasertpdepayload_class->process = gst_rtp_mp4g_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_mp4g_depay_setcaps;
- gstbasertpdepayload_class->handle_event = gst_rtp_mp4g_depay_handle_event;
+ gstrtpbasedepayload_class->process = gst_rtp_mp4g_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mp4g_depay_setcaps;
+ gstrtpbasedepayload_class->handle_event = gst_rtp_mp4g_depay_handle_event;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4g_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4g_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 ES depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG4 elementary streams from RTP packets (RFC 3640)",
+ "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpmp4gdepay_debug, "rtpmp4gdepay", 0,
"MP4-generic RTP Depayloader");
}
static void
-gst_rtp_mp4g_depay_init (GstRtpMP4GDepay * rtpmp4gdepay,
- GstRtpMP4GDepayClass * klass)
+gst_rtp_mp4g_depay_init (GstRtpMP4GDepay * rtpmp4gdepay)
{
rtpmp4gdepay->adapter = gst_adapter_new ();
rtpmp4gdepay->packets = g_queue_new ();
}
static gboolean
-gst_rtp_mp4g_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_mp4g_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRtpMP4GDepay *rtpmp4gdepay;
}
GST_DEBUG_OBJECT (rtpmp4gdepay, "pushing AU_index %u", AU_index);
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpmp4gdepay), outbuf);
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpmp4gdepay), outbuf);
rtpmp4gdepay->next_AU_index = AU_index + 1;
}
}
/* we received the expected packet, push it and flush as much as we can from
* the queue */
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpmp4gdepay), outbuf);
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpmp4gdepay), outbuf);
rtpmp4gdepay->next_AU_index++;
while ((outbuf = g_queue_peek_head (rtpmp4gdepay->packets))) {
GST_DEBUG_OBJECT (rtpmp4gdepay, "pushing expected AU_index %u",
AU_index);
outbuf = g_queue_pop_head (rtpmp4gdepay->packets);
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpmp4gdepay),
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpmp4gdepay),
outbuf);
rtpmp4gdepay->next_AU_index++;
} else {
}
static GstBuffer *
-gst_rtp_mp4g_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_mp4g_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpMP4GDepay *rtpmp4gdepay;
- GstBuffer *outbuf;
+ GstBuffer *outbuf = NULL;
GstClockTime timestamp;
+ GstRTPBuffer rtp = { NULL };
rtpmp4gdepay = GST_RTP_MP4G_DEPAY (depayload);
guint AU_size, AU_index, AU_index_delta, payload_AU_size;
gboolean M;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
GST_DEBUG_OBJECT (rtpmp4gdepay, "received payload of %d", payload_len);
- rtptime = gst_rtp_buffer_get_timestamp (buf);
- M = gst_rtp_buffer_get_marker (buf);
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
+ M = gst_rtp_buffer_get_marker (&rtp);
+
+ gst_rtp_buffer_unmap (&rtp);
if (rtpmp4gdepay->sizelength > 0) {
gint num_AU_headers, AU_headers_bytes, i;
/* collect stuff in the adapter, strip header from payload and push in
* the adapter */
outbuf =
- gst_rtp_buffer_get_payload_subbuffer (buf, payload_AU, AU_size);
+ gst_rtp_buffer_get_payload_subbuffer (&rtp, payload_AU, AU_size);
gst_adapter_push (rtpmp4gdepay->adapter, outbuf);
if (M) {
avail = gst_adapter_available (rtpmp4gdepay->adapter);
outbuf = gst_adapter_take_buffer (rtpmp4gdepay->adapter, avail);
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (depayload->srcpad));
/* copy some of the fields we calculated above on the buffer. We also
* copy the AU_index so that we can sort the packets in our queue. */
* RTP packet. */
timestamp = -1;
- GST_DEBUG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ GST_DEBUG_OBJECT (depayload,
+ "pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
gst_rtp_mp4g_depay_queue (rtpmp4gdepay, outbuf);
}
} else {
/* push complete buffer in adapter */
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 0, payload_len);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 0, payload_len);
gst_adapter_push (rtpmp4gdepay->adapter, outbuf);
/* if this was the last packet of the VOP, create and push a buffer */
outbuf = gst_adapter_take_buffer (rtpmp4gdepay->adapter, avail);
- GST_DEBUG ("gst_rtp_mp4g_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ GST_DEBUG ("gst_rtp_mp4g_depay_chain: pushing buffer of size %"
+ G_GSIZE_FORMAT, gst_buffer_get_size (outbuf));
+ gst_rtp_buffer_unmap (&rtp);
return outbuf;
}
}
}
+
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpmp4gdepay, STREAM, DECODE,
("Packet payload was too short."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static gboolean
-gst_rtp_mp4g_depay_handle_event (GstBaseRTPDepayload * filter, GstEvent * event)
+gst_rtp_mp4g_depay_handle_event (GstRTPBaseDepayload * filter, GstEvent * event)
{
gboolean ret;
GstRtpMP4GDepay *rtpmp4gdepay;
}
ret =
- GST_BASE_RTP_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
+ GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
return ret;
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpMP4GDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
gint profile_level_id;
gint streamtype;
struct _GstRtpMP4GDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_mp4g_depay_get_type (void);
static GstStateChangeReturn gst_rtp_mp4g_pay_change_state (GstElement * element,
GstStateChange transition);
-static gboolean gst_rtp_mp4g_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_mp4g_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_mp4g_pay_handle_buffer (GstBaseRTPPayload *
+static GstFlowReturn gst_rtp_mp4g_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
-static gboolean gst_rtp_mp4g_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_mp4g_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
-GST_BOILERPLATE (GstRtpMP4GPay, gst_rtp_mp4g_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
+#define gst_rtp_mp4g_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4GPay, gst_rtp_mp4g_pay, GST_TYPE_RTP_BASE_PAYLOAD)
- static void gst_rtp_mp4g_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4g_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4g_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP MPEG4 ES payloader",
- "Codec/Payloader/Network/RTP",
- "Payload MPEG4 elementary streams as RTP packets (RFC 3640)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mp4g_pay_class_init (GstRtpMP4GPayClass * klass)
+ static void gst_rtp_mp4g_pay_class_init (GstRtpMP4GPayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->finalize = gst_rtp_mp4g_pay_finalize;
gstelement_class->change_state = gst_rtp_mp4g_pay_change_state;
- gstbasertppayload_class->set_caps = gst_rtp_mp4g_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_mp4g_pay_handle_buffer;
- gstbasertppayload_class->handle_event = gst_rtp_mp4g_pay_handle_event;
+ gstrtpbasepayload_class->set_caps = gst_rtp_mp4g_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_mp4g_pay_handle_buffer;
+ gstrtpbasepayload_class->sink_event = gst_rtp_mp4g_pay_sink_event;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4g_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4g_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 ES payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload MPEG4 elementary streams as RTP packets (RFC 3640)",
+ "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpmp4gpay_debug, "rtpmp4gpay", 0,
"MP4-generic RTP Payloader");
}
static void
-gst_rtp_mp4g_pay_init (GstRtpMP4GPay * rtpmp4gpay, GstRtpMP4GPayClass * klass)
+gst_rtp_mp4g_pay_init (GstRtpMP4GPay * rtpmp4gpay)
{
rtpmp4gpay->adapter = gst_adapter_new ();
}
gst_rtp_mp4g_pay_parse_audio_config (GstRtpMP4GPay * rtpmp4gpay,
GstBuffer * buffer)
{
- guint8 *data;
- guint size;
+ GstMapInfo map;
guint8 objectType = 0;
guint8 samplingIdx = 0;
guint8 channelCfg = 0;
GstBitReader br;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
- gst_bit_reader_init (&br, data, size);
+ gst_bit_reader_init (&br, map.data, map.size);
/* any object type is fine, we need to copy it to the profile-level-id field. */
if (!gst_bit_reader_get_bits_uint8 (&br, &objectType, 5))
objectType, samplingIdx, rtpmp4gpay->rate, channelCfg,
rtpmp4gpay->frame_len);
+ gst_buffer_unmap (buffer, &map);
return TRUE;
/* ERROR */
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
(NULL), ("config string too short"));
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
invalid_object:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
(NULL), ("invalid object type"));
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
wrong_freq:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported frequency index %d", samplingIdx));
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
wrong_channels:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported number of channels %d, must < 8", channelCfg));
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
}
gst_rtp_mp4g_pay_parse_video_config (GstRtpMP4GPay * rtpmp4gpay,
GstBuffer * buffer)
{
- guint8 *data;
- guint size;
+ GstMapInfo map;
guint32 code;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
- if (size < 5)
+ if (map.size < 5)
goto too_short;
- code = GST_READ_UINT32_BE (data);
+ code = GST_READ_UINT32_BE (map.data);
g_free (rtpmp4gpay->profile);
if (code == VOS_STARTCODE) {
/* get profile */
- rtpmp4gpay->profile = g_strdup_printf ("%d", (gint) data[4]);
+ rtpmp4gpay->profile = g_strdup_printf ("%d", (gint) map.data[4]);
} else {
GST_ELEMENT_WARNING (rtpmp4gpay, STREAM, FORMAT,
(NULL), ("profile not found in config string, assuming \'1\'"));
GST_LOG_OBJECT (rtpmp4gpay, "profile %s", rtpmp4gpay->profile);
+ gst_buffer_unmap (buffer, &map);
+
return TRUE;
/* ERROR */
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
(NULL), ("config string too short"));
+ gst_buffer_unmap (buffer, &map);
return FALSE;
}
}
/* hmm, silly */
if (rtpmp4gpay->params) {
- res = gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpmp4gpay),
+ res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpmp4gpay),
"encoding-params", G_TYPE_STRING, rtpmp4gpay->params, MP4GCAPS);
} else {
- res = gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpmp4gpay),
+ res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpmp4gpay),
MP4GCAPS);
}
}
static gboolean
-gst_rtp_mp4g_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_mp4g_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
GstRtpMP4GPay *rtpmp4gpay;
GstStructure *structure;
if (media_type == NULL)
goto config_failed;
- gst_basertppayload_set_options (payload, media_type, TRUE, "MPEG4-GENERIC",
+ gst_rtp_base_payload_set_options (payload, media_type, TRUE, "MPEG4-GENERIC",
rtpmp4gpay->rate);
res = gst_rtp_mp4g_pay_new_caps (rtpmp4gpay);
total = avail = gst_adapter_available (rtpmp4gpay->adapter);
ret = GST_FLOW_OK;
- mtu = GST_BASE_RTP_PAYLOAD_MTU (rtpmp4gpay);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpmp4gpay);
while (avail > 0) {
guint towrite;
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL };
/* this will be the total lenght of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
/* create buffer to hold the payload, also make room for the 4 header bytes. */
outbuf = gst_rtp_buffer_new_allocate (payload_len + 4, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
* |AU-headers-length|AU-header|AU-header| |AU-header|padding|
gst_adapter_flush (rtpmp4gpay->adapter, payload_len);
/* marker only if the packet is complete */
- gst_rtp_buffer_set_marker (outbuf, avail <= payload_len);
+ gst_rtp_buffer_set_marker (&rtp, avail <= payload_len);
+
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmp4gpay->first_timestamp;
GST_BUFFER_DURATION (outbuf) = rtpmp4gpay->first_duration;
rtpmp4gpay->offset += rtpmp4gpay->frame_len;
}
- ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp4gpay), outbuf);
+ ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmp4gpay), outbuf);
avail -= payload_len;
}
/* we expect buffers as exactly one complete AU
*/
static GstFlowReturn
-gst_rtp_mp4g_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_mp4g_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpMP4GPay *rtpmp4gpay;
}
static gboolean
-gst_rtp_mp4g_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_mp4g_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
{
GstRtpMP4GPay *rtpmp4gpay;
- rtpmp4gpay = GST_RTP_MP4G_PAY (gst_pad_get_parent (pad));
+ rtpmp4gpay = GST_RTP_MP4G_PAY (payload);
GST_DEBUG ("Got event: %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
case GST_EVENT_EOS:
/* This flush call makes sure that the last buffer is always pushed
* to the base payloader */
break;
}
- g_object_unref (rtpmp4gpay);
-
/* let parent handle event too */
- return FALSE;
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
}
static GstStateChangeReturn
#define __GST_RTP_MP4G_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRtpMP4GPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstAdapter *adapter;
GstClockTime first_timestamp;
struct _GstRtpMP4GPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_mp4g_pay_get_type (void);
)
);
-GST_BOILERPLATE (GstRtpMP4VDepay, gst_rtp_mp4v_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_mp4v_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4VDepay, gst_rtp_mp4v_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_mp4v_depay_finalize (GObject * object);
-static gboolean gst_rtp_mp4v_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_mp4v_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_mp4v_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_mp4v_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static GstStateChangeReturn gst_rtp_mp4v_depay_change_state (GstElement *
element, GstStateChange transition);
-
-static void
-gst_rtp_mp4v_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4v_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4v_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG4 video depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG4 video from RTP packets (RFC 3016)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
static void
gst_rtp_mp4v_depay_class_init (GstRtpMP4VDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_mp4v_depay_finalize;
gstelement_class->change_state = gst_rtp_mp4v_depay_change_state;
- gstbasertpdepayload_class->process = gst_rtp_mp4v_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_mp4v_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_mp4v_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mp4v_depay_setcaps;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4v_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4v_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 video depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG4 video from RTP packets (RFC 3016)",
+ "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpmp4vdepay_debug, "rtpmp4vdepay", 0,
"MPEG4 video RTP Depayloader");
}
static void
-gst_rtp_mp4v_depay_init (GstRtpMP4VDepay * rtpmp4vdepay,
- GstRtpMP4VDepayClass * klass)
+gst_rtp_mp4v_depay_init (GstRtpMP4VDepay * rtpmp4vdepay)
{
rtpmp4vdepay->adapter = gst_adapter_new ();
}
}
static gboolean
-gst_rtp_mp4v_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_mp4v_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstCaps *srccaps;
}
static GstBuffer *
-gst_rtp_mp4v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_mp4v_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpMP4VDepay *rtpmp4vdepay;
- GstBuffer *outbuf;
+ GstBuffer *outbuf = NULL;
+ GstRTPBuffer rtp = { NULL };
rtpmp4vdepay = GST_RTP_MP4V_DEPAY (depayload);
if (GST_BUFFER_IS_DISCONT (buf))
gst_adapter_clear (rtpmp4vdepay->adapter);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
gst_adapter_push (rtpmp4vdepay->adapter, outbuf);
/* if this was the last packet of the VOP, create and push a buffer */
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
avail = gst_adapter_available (rtpmp4vdepay->adapter);
outbuf = gst_adapter_take_buffer (rtpmp4vdepay->adapter, avail);
- GST_DEBUG ("gst_rtp_mp4v_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
-
- return outbuf;
+ GST_DEBUG ("gst_rtp_mp4v_depay_chain: pushing buffer of size %"
+ G_GSIZE_FORMAT, gst_buffer_get_size (outbuf));
}
- return NULL;
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ return outbuf;
}
static GstStateChangeReturn
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpMP4VDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstAdapter *adapter;
};
struct _GstRtpMP4VDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_mp4v_depay_get_type (void);
static void gst_rtp_mp4v_pay_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static gboolean gst_rtp_mp4v_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_mp4v_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_mp4v_pay_handle_buffer (GstBaseRTPPayload *
+static GstFlowReturn gst_rtp_mp4v_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
-static gboolean gst_rtp_mp4v_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_mp4v_pay_sink_event (GstRTPBasePayload * pay,
+ GstEvent * event);
-GST_BOILERPLATE (GstRtpMP4VPay, gst_rtp_mp4v_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
+#define gst_rtp_mp4v_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4VPay, gst_rtp_mp4v_pay, GST_TYPE_RTP_BASE_PAYLOAD)
- static void gst_rtp_mp4v_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4v_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mp4v_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG4 Video payloader", "Codec/Payloader/Network/RTP",
- "Payload MPEG-4 video as RTP packets (RFC 3016)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mp4v_pay_class_init (GstRtpMP4VPayClass * klass)
+ static void gst_rtp_mp4v_pay_class_init (GstRtpMP4VPayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->set_property = gst_rtp_mp4v_pay_set_property;
gobject_class->get_property = gst_rtp_mp4v_pay_get_property;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4v_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4v_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 Video payloader", "Codec/Payloader/Network/RTP",
+ "Payload MPEG-4 video as RTP packets (RFC 3016)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SEND_CONFIG,
g_param_spec_boolean ("send-config", "Send Config",
"Send the config parameters in RTP packets as well(deprecated "
gobject_class->finalize = gst_rtp_mp4v_pay_finalize;
- gstbasertppayload_class->set_caps = gst_rtp_mp4v_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_mp4v_pay_handle_buffer;
- gstbasertppayload_class->handle_event = gst_rtp_mp4v_pay_handle_event;
+ gstrtpbasepayload_class->set_caps = gst_rtp_mp4v_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_mp4v_pay_handle_buffer;
+ gstrtpbasepayload_class->sink_event = gst_rtp_mp4v_pay_sink_event;
GST_DEBUG_CATEGORY_INIT (rtpmp4vpay_debug, "rtpmp4vpay", 0,
"MP4 video RTP Payloader");
}
static void
-gst_rtp_mp4v_pay_init (GstRtpMP4VPay * rtpmp4vpay, GstRtpMP4VPayClass * klass)
+gst_rtp_mp4v_pay_init (GstRtpMP4VPay * rtpmp4vpay)
{
rtpmp4vpay->adapter = gst_adapter_new ();
rtpmp4vpay->rate = 90000;
gst_value_set_buffer (&v, rtpmp4vpay->config);
config = gst_value_serialize (&v);
- res = gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpmp4vpay),
+ res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpmp4vpay),
"profile-level-id", G_TYPE_STRING, profile,
"config", G_TYPE_STRING, config, NULL);
}
static gboolean
-gst_rtp_mp4v_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_mp4v_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
GstRtpMP4VPay *rtpmp4vpay;
GstStructure *structure;
rtpmp4vpay = GST_RTP_MP4V_PAY (payload);
- gst_basertppayload_set_options (payload, "video", TRUE, "MP4V-ES",
+ gst_rtp_base_payload_set_options (payload, "video", TRUE, "MP4V-ES",
rtpmp4vpay->rate);
res = TRUE;
GST_LOG_OBJECT (rtpmp4vpay, "got codec_data");
if (G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
GstBuffer *buffer;
- guint8 *data;
- guint size;
buffer = gst_value_get_buffer (codec_data);
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
-
- if (size < 5)
+ if (gst_buffer_get_size (buffer) < 5)
goto done;
- rtpmp4vpay->profile = data[4];
+ gst_buffer_extract (buffer, 4, &rtpmp4vpay->profile, 1);
GST_LOG_OBJECT (rtpmp4vpay, "configuring codec_data, profile %d",
- data[4]);
+ rtpmp4vpay->profile);
if (rtpmp4vpay->config)
gst_buffer_unref (rtpmp4vpay->config);
GstBuffer *outbuf_data = NULL;
GstFlowReturn ret;
GstBufferList *list = NULL;
- GstBufferListIterator *it = NULL;
/* the data available in the adapter is either smaller
* than the MTU or bigger. In the case it is smaller, the complete
* of buffers and the whole list will be pushed downstream
* at once */
list = gst_buffer_list_new ();
- it = gst_buffer_list_iterate (list);
}
while (avail > 0) {
guint towrite;
- guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL };
/* this will be the total lenght of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
/* fill one MTU or all available bytes */
- towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpmp4vpay));
+ towrite = MIN (packet_len, GST_RTP_BASE_PAYLOAD_MTU (rtpmp4vpay));
/* this is the payload length */
payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
- if (rtpmp4vpay->buffer_list) {
- /* create buffer without payload. The payload will be put
- * in next buffer instead. Both buffers will be then added
- * to the list */
- outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
-
- /* Take buffer with the payload from the adapter */
- outbuf_data = gst_adapter_take_buffer (rtpmp4vpay->adapter, payload_len);
- } else {
- /* create buffer to hold the payload */
- outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+ /* create buffer without payload. The payload will be put
+ * in next buffer instead. Both buffers will be merged */
+ outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
- /* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
-
- gst_adapter_copy (rtpmp4vpay->adapter, payload, 0, payload_len);
- gst_adapter_flush (rtpmp4vpay->adapter, payload_len);
- }
+ /* Take buffer with the payload from the adapter */
+ outbuf_data = gst_adapter_take_buffer (rtpmp4vpay->adapter, payload_len);
avail -= payload_len;
- gst_rtp_buffer_set_marker (outbuf, avail == 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_marker (&rtp, avail == 0);
+ gst_rtp_buffer_unmap (&rtp);
+
+ outbuf = gst_buffer_join (outbuf, outbuf_data);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmp4vpay->first_timestamp;
if (rtpmp4vpay->buffer_list) {
- /* create a new group to hold the rtp header and the payload */
- gst_buffer_list_iterator_add_group (it);
- gst_buffer_list_iterator_add (it, outbuf);
- gst_buffer_list_iterator_add (it, outbuf_data);
+ /* add to list */
+ gst_buffer_list_insert (list, -1, outbuf);
} else {
- ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp4vpay), outbuf);
+ ret =
+ gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmp4vpay), outbuf);
}
}
if (rtpmp4vpay->buffer_list) {
- gst_buffer_list_iterator_free (it);
/* push the whole buffer list at once */
ret =
- gst_basertppayload_push_list (GST_BASE_RTP_PAYLOAD (rtpmp4vpay), list);
+ gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtpmp4vpay),
+ list);
}
return ret;
/* see if config changed */
equal = FALSE;
if (enc->config) {
- if (GST_BUFFER_SIZE (enc->config) == i) {
- equal = memcmp (GST_BUFFER_DATA (enc->config), data, i) == 0;
+ if (gst_buffer_get_size (enc->config) == i) {
+ equal = gst_buffer_memcmp (enc->config, 0, data, i) == 0;
}
}
/* if config string changed or new profile, make new caps */
if (enc->config)
gst_buffer_unref (enc->config);
enc->config = gst_buffer_new_and_alloc (i);
- memcpy (GST_BUFFER_DATA (enc->config), data, i);
+
+ gst_buffer_fill (enc->config, 0, data, i);
+
gst_rtp_mp4v_pay_new_caps (enc);
}
*strip = i;
/* we expect buffers starting on startcodes.
*/
static GstFlowReturn
-gst_rtp_mp4v_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_mp4v_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpMP4VPay *rtpmp4vpay;
GstFlowReturn ret;
- guint size, avail;
+ guint avail;
guint packet_len;
- guint8 *data;
+ GstMapInfo map;
+ gsize size;
gboolean flush;
gint strip;
GstClockTime timestamp, duration;
rtpmp4vpay = GST_RTP_MP4V_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ size = map.size;
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
avail = gst_adapter_available (rtpmp4vpay->adapter);
/* depay incomming data and see if we need to start a new RTP
* packet */
- flush = gst_rtp_mp4v_pay_depay_data (rtpmp4vpay, data, size, &strip, &vopi);
+ flush =
+ gst_rtp_mp4v_pay_depay_data (rtpmp4vpay, map.data, size, &strip, &vopi);
+ gst_buffer_unmap (buffer, &map);
+
if (strip) {
/* strip off config if requested */
if (!(rtpmp4vpay->config_interval > 0)) {
GstBuffer *subbuf;
GST_LOG_OBJECT (rtpmp4vpay, "stripping config at %d, size %d", strip,
- size - strip);
+ (gint) size - strip);
/* strip off header */
- subbuf = gst_buffer_create_sub (buffer, strip, size - strip);
+ subbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY, strip,
+ size - strip);
GST_BUFFER_TIMESTAMP (subbuf) = timestamp;
gst_buffer_unref (buffer);
buffer = subbuf;
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
} else {
GST_LOG_OBJECT (rtpmp4vpay, "found config in stream");
rtpmp4vpay->last_config = timestamp;
gst_buffer_unref (buffer);
buffer = superbuf;
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
if (timestamp != -1) {
rtpmp4vpay->last_config = timestamp;
/* get packet length of data and see if we exceeded MTU. */
packet_len = gst_rtp_buffer_calc_packet_len (avail + size, 0, 0);
- if (gst_basertppayload_is_filled (basepayload,
+ if (gst_rtp_base_payload_is_filled (basepayload,
packet_len, rtpmp4vpay->duration + duration)) {
ret = gst_rtp_mp4v_pay_flush (rtpmp4vpay);
rtpmp4vpay->first_timestamp = timestamp;
}
static gboolean
-gst_rtp_mp4v_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_mp4v_pay_sink_event (GstRTPBasePayload * pay, GstEvent * event)
{
GstRtpMP4VPay *rtpmp4vpay;
- rtpmp4vpay = GST_RTP_MP4V_PAY (gst_pad_get_parent (pad));
+ rtpmp4vpay = GST_RTP_MP4V_PAY (pay);
GST_DEBUG ("Got event: %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
case GST_EVENT_EOS:
/* This flush call makes sure that the last buffer is always pushed
* to the base payloader */
break;
}
- g_object_unref (rtpmp4vpay);
-
/* let parent handle event too */
- return FALSE;
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (pay, event);
}
static void
#define __GST_RTP_MP4V_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRtpMP4VPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstAdapter *adapter;
GstClockTime first_timestamp;
struct _GstRtpMP4VPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_mp4v_pay_get_type (void);
"clock-rate = (int) 90000")
);
-GST_BOILERPLATE (GstRtpMPADepay, gst_rtp_mpa_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_mpa_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMPADepay, gst_rtp_mpa_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_mpa_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_mpa_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_mpa_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_mpa_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_mpa_depay_base_init (gpointer klass)
+gst_rtp_mpa_depay_class_init (GstRtpMPADepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpmpadepay_debug, "rtpmpadepay", 0,
+ "MPEG Audio RTP Depayloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mpa_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mpa_depay_sink_template);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_depay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG audio from RTP packets (RFC 2038)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mpa_depay_class_init (GstRtpMPADepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
- gstbasertpdepayload_class->set_caps = gst_rtp_mpa_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_mpa_depay_process;
- GST_DEBUG_CATEGORY_INIT (rtpmpadepay_debug, "rtpmpadepay", 0,
- "MPEG Audio RTP Depayloader");
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mpa_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_mpa_depay_process;
}
static void
-gst_rtp_mpa_depay_init (GstRtpMPADepay * rtpmpadepay,
- GstRtpMPADepayClass * klass)
+gst_rtp_mpa_depay_init (GstRtpMPADepay * rtpmpadepay)
{
/* needed because of GST_BOILERPLATE */
}
static gboolean
-gst_rtp_mpa_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_mpa_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstCaps *outcaps;
}
static GstBuffer *
-gst_rtp_mpa_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_mpa_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpMPADepay *rtpmpadepay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
+ gint payload_len;
+#if 0
+ guint8 *payload;
+ guint16 frag_offset;
+#endif
+ gboolean marker;
rtpmpadepay = GST_RTP_MPA_DEPAY (depayload);
- {
- gint payload_len;
- gboolean marker;
-
- payload_len = gst_rtp_buffer_get_payload_len (buf);
-
- if (payload_len <= 4)
- goto empty_packet;
-
- /* strip off header
- *
- * 0 1 2 3
- * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- * | MBZ | Frag_offset |
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- /* frag_offset = (payload[2] << 8) | payload[3]; */
-
- /* subbuffer skipping the 4 header bytes */
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 4, -1);
- marker = gst_rtp_buffer_get_marker (buf);
-
- if (marker) {
- /* mark start of talkspurt with discont */
- GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- }
- GST_DEBUG_OBJECT (rtpmpadepay,
- "gst_rtp_mpa_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
-
- /* FIXME, we can push half mpeg frames when they are split over multiple
- * RTP packets */
- return outbuf;
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+
+ if (payload_len <= 4)
+ goto empty_packet;
+
+#if 0
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ /* strip off header
+ *
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | MBZ | Frag_offset |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ frag_offset = (payload[2] << 8) | payload[3];
+#endif
+
+ /* subbuffer skipping the 4 header bytes */
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 4, -1);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+
+ if (marker) {
+ /* mark start of talkspurt with discont */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
+ GST_DEBUG_OBJECT (rtpmpadepay,
+ "gst_rtp_mpa_depay_chain: pushing buffer of size %" G_GSIZE_FORMAT "",
+ gst_buffer_get_size (outbuf));
+
+ gst_rtp_buffer_unmap (&rtp);
- return NULL;
+ /* FIXME, we can push half mpeg frames when they are split over multiple
+ * RTP packets */
+ return outbuf;
/* ERRORS */
empty_packet:
{
GST_ELEMENT_WARNING (rtpmpadepay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#define __GST_RTP_MPA_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpMPADepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
};
struct _GstRtpMPADepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_mpa_depay_get_type (void);
static GstStateChangeReturn gst_rtp_mpa_pay_change_state (GstElement * element,
GstStateChange transition);
-static gboolean gst_rtp_mpa_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_mpa_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static gboolean gst_rtp_mpa_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_mpa_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
static GstFlowReturn gst_rtp_mpa_pay_flush (GstRtpMPAPay * rtpmpapay);
-static GstFlowReturn gst_rtp_mpa_pay_handle_buffer (GstBaseRTPPayload * payload,
+static GstFlowReturn gst_rtp_mpa_pay_handle_buffer (GstRTPBasePayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpMPAPay, gst_rtp_mpa_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_mpa_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mpa_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mpa_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG audio payloader", "Codec/Payloader/Network/RTP",
- "Payload MPEG audio as RTP packets (RFC 2038)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_mpa_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMPAPay, gst_rtp_mpa_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_mpa_pay_class_init (GstRtpMPAPayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpmpapay_debug, "rtpmpapay", 0,
+ "MPEG Audio RTP Depayloader");
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->finalize = gst_rtp_mpa_pay_finalize;
gstelement_class->change_state = gst_rtp_mpa_pay_change_state;
- gstbasertppayload_class->set_caps = gst_rtp_mpa_pay_setcaps;
- gstbasertppayload_class->handle_event = gst_rtp_mpa_pay_handle_event;
- gstbasertppayload_class->handle_buffer = gst_rtp_mpa_pay_handle_buffer;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_pay_sink_template));
- GST_DEBUG_CATEGORY_INIT (rtpmpapay_debug, "rtpmpapay", 0,
- "MPEG Audio RTP Depayloader");
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload MPEG audio as RTP packets (RFC 2038)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_mpa_pay_setcaps;
+ gstrtpbasepayload_class->sink_event = gst_rtp_mpa_pay_sink_event;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_mpa_pay_handle_buffer;
}
static void
-gst_rtp_mpa_pay_init (GstRtpMPAPay * rtpmpapay, GstRtpMPAPayClass * klass)
+gst_rtp_mpa_pay_init (GstRtpMPAPay * rtpmpapay)
{
rtpmpapay->adapter = gst_adapter_new ();
}
}
static gboolean
-gst_rtp_mpa_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_mpa_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
- gst_basertppayload_set_options (payload, "audio", TRUE, "MPA", 90000);
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ gst_rtp_base_payload_set_options (payload, "audio", TRUE, "MPA", 90000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
return res;
}
static gboolean
-gst_rtp_mpa_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_mpa_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
{
+ gboolean ret;
GstRtpMPAPay *rtpmpapay;
- rtpmpapay = GST_RTP_MPA_PAY (gst_pad_get_parent (pad));
+ rtpmpapay = GST_RTP_MPA_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
break;
}
- gst_object_unref (rtpmpapay);
+ ret = GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
- /* FALSE to let the parent handle the event as well */
- return FALSE;
+ return ret;
}
static GstFlowReturn
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL };
/* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);
/* fill one MTU or all available bytes */
- towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpmpapay));
+ towrite = MIN (packet_len, GST_RTP_BASE_PAYLOAD_MTU (rtpmpapay));
/* this is the payload length */
payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
payload_len -= 4;
- gst_rtp_buffer_set_payload_type (outbuf, GST_RTP_PAYLOAD_MPA);
+ gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_PAYLOAD_MPA);
/*
* 0 1 2 3
* | MBZ | Frag_offset |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload[0] = 0;
payload[1] = 0;
payload[2] = frag_offset >> 8;
frag_offset += payload_len;
if (avail == 0)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmpapay->first_ts;
GST_BUFFER_DURATION (outbuf) = rtpmpapay->duration;
- ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmpapay), outbuf);
+ ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmpapay), outbuf);
}
return ret;
}
static GstFlowReturn
-gst_rtp_mpa_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_mpa_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpMPAPay *rtpmpapay;
rtpmpapay = GST_RTP_MPA_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
/* if this buffer is going to overflow the packet, flush what we
* have. */
- if (gst_basertppayload_is_filled (basepayload,
+ if (gst_rtp_base_payload_is_filled (basepayload,
packet_len, rtpmpapay->duration + duration)) {
ret = gst_rtp_mpa_pay_flush (rtpmpapay);
avail = 0;
#define __GST_RTP_MPA_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRtpMPAPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstAdapter *adapter;
GstClockTime first_ts;
struct _GstRtpMPAPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_mpa_pay_get_type (void);
GstBuffer *buffer;
} GstADUFrame;
-GST_BOILERPLATE (GstRtpMPARobustDepay, gst_rtp_mpa_robust_depay,
- GstBaseRTPDepayload, GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_mpa_robust_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMPARobustDepay, gst_rtp_mpa_robust_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static GstStateChangeReturn gst_rtp_mpa_robust_change_state (GstElement *
element, GstStateChange transition);
-static gboolean gst_rtp_mpa_robust_depay_setcaps (GstBaseRTPDepayload *
+static gboolean gst_rtp_mpa_robust_depay_setcaps (GstRTPBaseDepayload *
depayload, GstCaps * caps);
-static GstBuffer *gst_rtp_mpa_robust_depay_process (GstBaseRTPDepayload *
+static GstBuffer *gst_rtp_mpa_robust_depay_process (GstRTPBaseDepayload *
depayload, GstBuffer * buf);
static void
-gst_rtp_mpa_robust_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mpa_robust_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mpa_robust_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG audio from RTP packets (RFC 5219)",
- "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
-}
-
-static void
gst_rtp_mpa_robust_depay_finalize (GObject * object)
{
GstRtpMPARobustDepay *rtpmpadepay;
G_OBJECT_CLASS (parent_class)->finalize (object);
}
-
static void
gst_rtp_mpa_robust_depay_class_init (GstRtpMPARobustDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpmparobustdepay_debug, "rtpmparobustdepay", 0,
+ "Robust MPEG Audio RTP Depayloader");
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_mpa_robust_depay_finalize;
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_rtp_mpa_robust_change_state);
- gstbasertpdepayload_class->set_caps = gst_rtp_mpa_robust_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_mpa_robust_depay_process;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_robust_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_robust_depay_sink_template));
- GST_DEBUG_CATEGORY_INIT (rtpmparobustdepay_debug, "rtpmparobustdepay", 0,
- "Robust MPEG Audio RTP Depayloader");
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG audio from RTP packets (RFC 5219)",
+ "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
+
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mpa_robust_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_mpa_robust_depay_process;
}
static void
-gst_rtp_mpa_robust_depay_init (GstRtpMPARobustDepay * rtpmpadepay,
- GstRtpMPARobustDepayClass * klass)
+gst_rtp_mpa_robust_depay_init (GstRtpMPARobustDepay * rtpmpadepay)
{
rtpmpadepay->adapter = gst_adapter_new ();
rtpmpadepay->adu_frames = g_queue_new ();
}
static gboolean
-gst_rtp_mpa_robust_depay_setcaps (GstBaseRTPDepayload * depayload,
+gst_rtp_mpa_robust_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps)
{
GstRtpMPARobustDepay *rtpmpadepay;
rtpmpadepay, GstADUFrame * frame)
{
GstADUFrame *dummy;
+ GstMapInfo map;
dummy = g_slice_dup (GstADUFrame, frame);
dummy->backpointer = 0;
dummy->buffer = gst_buffer_new_and_alloc (dummy->side_info + 4);
- memset (GST_BUFFER_DATA (dummy->buffer), 0, dummy->side_info + 4);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (dummy->buffer), dummy->header);
+
+ gst_buffer_map (dummy->buffer, &map, GST_MAP_WRITE);
+ memset (map.data, 0, map.size);
+ GST_WRITE_UINT32_BE (map.data, dummy->header);
+ gst_buffer_unmap (dummy->buffer, &map);
+
GST_BUFFER_TIMESTAMP (dummy->buffer) = GST_BUFFER_TIMESTAMP (frame->buffer);
return dummy;
GstADUFrame *frame = NULL;
guint version, layer, channels, size;
guint crc;
+ GstMapInfo map;
g_return_val_if_fail (buf != NULL, FALSE);
- if (GST_BUFFER_SIZE (buf) < 6) {
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ if (map.size < 6)
goto corrupt_frame;
- }
frame = g_slice_new0 (GstADUFrame);
- frame->header = GST_READ_UINT32_BE (GST_BUFFER_DATA (buf));
+ frame->header = GST_READ_UINT32_BE (map.data);
size = mp3_type_frame_length_from_header (GST_ELEMENT_CAST (rtpmpadepay),
frame->header, &version, &layer, &channels, NULL, NULL, NULL, &crc);
/* backpointer */
if (layer == 3) {
- frame->backpointer = GST_READ_UINT16_BE (GST_BUFFER_DATA (buf) + 4);
+ frame->backpointer = GST_READ_UINT16_BE (map.data + 4);
frame->backpointer >>= 7;
GST_LOG_OBJECT (rtpmpadepay, "backpointer: %d", frame->backpointer);
}
frame->data_size = frame->size - 4 - frame->side_info;
/* some size validation checks */
- if (4 + frame->side_info > GST_BUFFER_SIZE (buf))
+ if (4 + frame->side_info > map.size)
goto corrupt_frame;
/* ADU data would then extend past MP3 frame,
* even using past byte reservoir */
- if (-frame->backpointer + (gint) (GST_BUFFER_SIZE (buf)) > frame->size)
+ if (-frame->backpointer + (gint) (map.size) > frame->size)
goto corrupt_frame;
+ gst_buffer_unmap (buf, &map);
+
/* ok, take buffer and queue */
frame->buffer = buf;
g_queue_push_tail (rtpmpadepay->adu_frames, frame);
corrupt_frame:
{
GST_DEBUG_OBJECT (rtpmpadepay, "frame is corrupt");
+ gst_buffer_unmap (buf, &map);
gst_buffer_unref (buf);
if (frame)
g_slice_free (GstADUFrame, frame);
GstBuffer * buf)
{
gboolean ret = FALSE;
- guint8 *data;
+ GstMapInfo map;
guint val, iindex, icc;
- data = GST_BUFFER_DATA (buf);
- val = GST_READ_UINT16_BE (data) >> 5;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ val = GST_READ_UINT16_BE (map.data) >> 5;
+ gst_buffer_unmap (buf, &map);
+
iindex = val >> 3;
icc = val & 0x7;
GstFlowReturn ret = GST_FLOW_OK;
while (1) {
+ GstMapInfo map;
if (G_UNLIKELY (!rtpmpadepay->cur_adu_frame)) {
rtpmpadepay->cur_adu_frame = rtpmpadepay->adu_frames->head;
if (G_UNLIKELY (frame->layer != 3)) {
GST_DEBUG_OBJECT (rtpmpadepay, "layer %d frame, sending as-is",
frame->layer);
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpmpadepay),
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpmpadepay),
frame->buffer);
frame->buffer = NULL;
/* and remove it from any further consideration */
continue;
}
- if (rtpmpadepay->offset == GST_BUFFER_SIZE (frame->buffer)) {
+ if (rtpmpadepay->offset == gst_buffer_get_size (frame->buffer)) {
if (g_list_next (rtpmpadepay->cur_adu_frame)) {
GST_LOG_OBJECT (rtpmpadepay,
"moving to next ADU frame, size %d, side_info %d",
gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, 0);
/* bytewriter corresponds to head frame,
* i.e. the header and the side info must match */
+ gst_buffer_map (head->buffer, &map, GST_MAP_READ);
gst_byte_writer_put_data (rtpmpadepay->mp3_frame,
- GST_BUFFER_DATA (head->buffer), 4 + head->side_info);
+ map.data, 4 + head->side_info);
+ gst_buffer_unmap (head->buffer, &map);
}
buf = frame->buffer;
rtpmpadepay->size);
if (rtpmpadepay->offset) {
+ gst_buffer_map (buf, &map, GST_MAP_READ);
/* no need to position, simply append */
- g_assert (GST_BUFFER_SIZE (buf) > rtpmpadepay->offset);
- av = MIN (av, GST_BUFFER_SIZE (buf) - rtpmpadepay->offset);
+ g_assert (map.size > rtpmpadepay->offset);
+ av = MIN (av, map.size - rtpmpadepay->offset);
GST_LOG_OBJECT (rtpmpadepay,
"appending %d bytes from ADU frame at offset %d", av,
rtpmpadepay->offset);
gst_byte_writer_put_data (rtpmpadepay->mp3_frame,
- GST_BUFFER_DATA (buf) + rtpmpadepay->offset, av);
+ map.data + rtpmpadepay->offset, av);
rtpmpadepay->offset += av;
+ gst_buffer_unmap (buf, &map);
} else {
gint pos, tpos;
gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, pos + av);
} else {
/* position and append */
+ gst_buffer_map (buf, &map, GST_MAP_READ);
GST_LOG_OBJECT (rtpmpadepay, "adding to current MP3 frame");
gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, tpos);
- av = MIN (av, GST_BUFFER_SIZE (buf) - 4 - frame->side_info);
+ av = MIN (av, map.size - 4 - frame->side_info);
gst_byte_writer_put_data (rtpmpadepay->mp3_frame,
- GST_BUFFER_DATA (buf) + 4 + frame->side_info, av);
+ map.data + 4 + frame->side_info, av);
rtpmpadepay->offset += av + 4 + frame->side_info;
+ gst_buffer_unmap (buf, &map);
}
}
rtpmpadepay->size -= head->data_size;
gst_rtp_mpa_robust_depay_dequeue_frame (rtpmpadepay);
/* send */
- ret = gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpmpadepay),
+ ret = gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpmpadepay),
buf);
}
}
}
static GstBuffer *
-gst_rtp_mpa_robust_depay_process (GstBaseRTPDepayload * depayload,
+gst_rtp_mpa_robust_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf)
{
GstRtpMPARobustDepay *rtpmpadepay;
gboolean cont, dtype;
guint av, size;
GstClockTime timestamp;
+ GstRTPBuffer rtp = { NULL };
rtpmpadepay = GST_RTP_MPA_ROBUST_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
timestamp = GST_BUFFER_TIMESTAMP (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 1)
goto short_read;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
offset = 0;
GST_LOG_OBJECT (rtpmpadepay, "payload_len: %d", payload_len);
*/
while (payload_len) {
if (G_LIKELY (rtpmpadepay->has_descriptor)) {
- cont = !!(payload[offset] & 0x80);
- dtype = !!(payload[offset] & 0x40);
+ cont = ! !(payload[offset] & 0x80);
+ dtype = ! !(payload[offset] & 0x40);
if (dtype) {
size = (payload[offset] & 0x3f) << 8 | payload[offset + 1];
payload_len--;
GST_LOG_OBJECT (rtpmpadepay, "offset %d has cont: %d, dtype: %d, size: %d",
offset, cont, dtype, size);
- buf = gst_rtp_buffer_get_payload_subbuffer (buf, offset,
+ buf = gst_rtp_buffer_get_payload_subbuffer (&rtp, offset,
MIN (size, payload_len));
if (cont) {
"discarding continuation fragment without prior fragment");
gst_buffer_unref (buf);
} else {
- av += GST_BUFFER_SIZE (buf);
+ av += gst_buffer_get_size (buf);
gst_adapter_push (rtpmpadepay->adapter, buf);
if (av == size) {
timestamp = gst_adapter_prev_timestamp (rtpmpadepay->adapter, NULL);
/* timestamp applies to first payload, no idea for subsequent ones */
timestamp = GST_CLOCK_TIME_NONE;
}
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
{
GST_ELEMENT_WARNING (rtpmpadepay, STREAM, DECODE,
(NULL), ("Packet contains invalid data"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#define __GST_RTP_MPA_ROBUST_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
#include <gst/base/gstadapter.h>
#include <gst/base/gstbytewriter.h>
struct _GstRtpMPARobustDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstAdapter *adapter;
gboolean has_descriptor;
struct _GstRtpMPARobustDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_mpa_robust_depay_get_type (void);
"clock-rate = (int) 90000")
);
-GST_BOILERPLATE (GstRtpMPVDepay, gst_rtp_mpv_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+G_DEFINE_TYPE (GstRtpMPVDepay, gst_rtp_mpv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_mpv_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_mpv_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_mpv_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_mpv_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_mpv_depay_base_init (gpointer klass)
+gst_rtp_mpv_depay_class_init (GstRtpMPVDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mpv_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mpv_depay_sink_template);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpv_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpv_depay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG video depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG video from RTP packets (RFC 2250)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mpv_depay_class_init (GstRtpMPVDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gstbasertpdepayload_class->set_caps = gst_rtp_mpv_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_mpv_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mpv_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_mpv_depay_process;
GST_DEBUG_CATEGORY_INIT (rtpmpvdepay_debug, "rtpmpvdepay", 0,
"MPEG Video RTP Depayloader");
}
static void
-gst_rtp_mpv_depay_init (GstRtpMPVDepay * rtpmpvdepay,
- GstRtpMPVDepayClass * klass)
+gst_rtp_mpv_depay_init (GstRtpMPVDepay * rtpmpvdepay)
{
- /* needed because of GST_BOILERPLATE */
}
static gboolean
-gst_rtp_mpv_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_mpv_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
gint clock_rate;
}
static GstBuffer *
-gst_rtp_mpv_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_mpv_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpMPVDepay *rtpmpvdepay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
rtpmpvdepay = GST_RTP_MPV_DEPAY (depayload);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
{
gint payload_len, payload_header;
guint8 *payload;
guint8 T;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload_header = 0;
if (payload_len <= 4)
payload += 4;
}
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, payload_header, -1);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, payload_header, -1);
if (outbuf) {
GST_DEBUG_OBJECT (rtpmpvdepay,
- "gst_rtp_mpv_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ "gst_rtp_mpv_depay_chain: pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
}
-
return outbuf;
}
#define __GST_RTP_MPV_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpMPVDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
};
struct _GstRtpMPVDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_mpv_depay_get_type (void);
static void gst_rtp_mpv_pay_finalize (GObject * object);
static GstFlowReturn gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay);
-static gboolean gst_rtp_mpv_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_mpv_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_mpv_pay_handle_buffer (GstBaseRTPPayload *
+static GstFlowReturn gst_rtp_mpv_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
-static gboolean gst_rtp_mpv_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_mpv_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
-GST_BOILERPLATE (GstRTPMPVPay, gst_rtp_mpv_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_mpv_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mpv_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_mpv_pay_src_template);
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG2 ES video payloader", "Codec/Payloader/Network/RTP",
- "Payload-encodes MPEG2 ES into RTP packets (RFC 2250)",
- "Thijs Vermeir <thijsvermeir@gmail.com>");
-}
+#define gst_rtp_mpv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPMPVPay, gst_rtp_mpv_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_mpv_pay_class_init (GstRTPMPVPayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gobject_class->finalize = gst_rtp_mpv_pay_finalize;
gstelement_class->change_state = gst_rtp_mpv_pay_change_state;
- gstbasertppayload_class->set_caps = gst_rtp_mpv_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_mpv_pay_handle_buffer;
- gstbasertppayload_class->handle_event = gst_rtp_mpv_pay_handle_event;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpv_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpv_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG2 ES video payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes MPEG2 ES into RTP packets (RFC 2250)",
+ "Thijs Vermeir <thijsvermeir@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_mpv_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_mpv_pay_handle_buffer;
+ gstrtpbasepayload_class->sink_event = gst_rtp_mpv_pay_sink_event;
GST_DEBUG_CATEGORY_INIT (rtpmpvpay_debug, "rtpmpvpay", 0,
"MPEG2 ES Video RTP Payloader");
}
static void
-gst_rtp_mpv_pay_init (GstRTPMPVPay * rtpmpvpay, GstRTPMPVPayClass * klass)
+gst_rtp_mpv_pay_init (GstRTPMPVPay * rtpmpvpay)
{
- GST_BASE_RTP_PAYLOAD (rtpmpvpay)->clock_rate = 90000;
- GST_BASE_RTP_PAYLOAD_PT (rtpmpvpay) = GST_RTP_PAYLOAD_MPV;
+ GST_RTP_BASE_PAYLOAD (rtpmpvpay)->clock_rate = 90000;
+ GST_RTP_BASE_PAYLOAD_PT (rtpmpvpay) = GST_RTP_PAYLOAD_MPV;
rtpmpvpay->adapter = gst_adapter_new ();
}
}
static gboolean
-gst_rtp_mpv_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_mpv_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
- gst_basertppayload_set_options (payload, "video", FALSE, "MPV", 90000);
- return gst_basertppayload_set_outcaps (payload, NULL);
+ gst_rtp_base_payload_set_options (payload, "video", FALSE, "MPV", 90000);
+ return gst_rtp_base_payload_set_outcaps (payload, NULL);
}
static gboolean
-gst_rtp_mpv_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_mpv_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
{
+ gboolean ret;
GstRTPMPVPay *rtpmpvpay;
- rtpmpvpay = GST_RTP_MPV_PAY (gst_pad_get_parent (pad));
+ rtpmpvpay = GST_RTP_MPV_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
break;
}
- gst_object_unref (rtpmpvpay);
+ ret = GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
- /* FALSE to let the parent handle the event as well */
- return FALSE;
+ return ret;
}
static GstFlowReturn
guint towrite;
guint packet_len;
guint payload_len;
+ GstRTPBuffer rtp = { NULL };
packet_len = gst_rtp_buffer_calc_packet_len (avail, 4, 0);
- towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpmpvpay));
+ towrite = MIN (packet_len, GST_RTP_BASE_PAYLOAD_MTU (rtpmpvpay));
payload_len = gst_rtp_buffer_calc_payload_len (towrite, 4, 0);
outbuf = gst_rtp_buffer_new_allocate (payload_len, 4, 0);
- payload = gst_rtp_buffer_get_payload (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* enable MPEG Video-specific header
*
* 0 1 2 3
avail -= payload_len;
- gst_rtp_buffer_set_marker (outbuf, avail == 0);
+ gst_rtp_buffer_set_marker (&rtp, avail == 0);
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmpvpay->first_ts;
- ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmpvpay), outbuf);
+ ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmpvpay), outbuf);
}
return ret;
}
static GstFlowReturn
-gst_rtp_mpv_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_mpv_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRTPMPVPay *rtpmpvpay;
GST_LOG_OBJECT (rtpmpvpay, "available %d, rtp packet length %d", avail,
packet_len);
- if (gst_basertppayload_is_filled (basepayload,
+ if (gst_rtp_base_payload_is_filled (basepayload,
packet_len, rtpmpvpay->duration)) {
ret = gst_rtp_mpv_pay_flush (rtpmpvpay);
} else {
#define __GST_RTP_MPV_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRTPMPVPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
GstAdapter *adapter;
GstClockTime first_ts;
struct _GstRTPMPVPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_mpv_pay_get_type (void);
GST_STATIC_CAPS ("audio/x-alaw, channels = (int) 1, rate = (int) [1, MAX ]")
);
-static GstBuffer *gst_rtp_pcma_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_pcma_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_pcma_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_pcma_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmaDepay, gst_rtp_pcma_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
-
-static void
-gst_rtp_pcma_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_pcma_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_pcma_depay_sink_template);
- gst_element_class_set_details_simple (element_class, "RTP PCMA depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts PCMA audio from RTP packets",
- "Edgard Lima <edgard.lima@indt.org.br>, Zeeshan Ali <zeenix@gmail.com>");
-}
+#define gst_rtp_pcma_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmaDepay, gst_rtp_pcma_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
gst_rtp_pcma_depay_class_init (GstRtpPcmaDepayClass * klass)
{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gstbasertpdepayload_class->process = gst_rtp_pcma_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_pcma_depay_setcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_pcma_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_pcma_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP PCMA depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts PCMA audio from RTP packets",
+ "Edgard Lima <edgard.lima@indt.org.br>, Zeeshan Ali <zeenix@gmail.com>");
+
+ gstrtpbasedepayload_class->process = gst_rtp_pcma_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_pcma_depay_setcaps;
}
static void
-gst_rtp_pcma_depay_init (GstRtpPcmaDepay * rtppcmadepay,
- GstRtpPcmaDepayClass * klass)
+gst_rtp_pcma_depay_init (GstRtpPcmaDepay * rtppcmadepay)
{
- GstBaseRTPDepayload *depayload;
+ GstRTPBaseDepayload *depayload;
- depayload = GST_BASE_RTP_DEPAYLOAD (rtppcmadepay);
+ depayload = GST_RTP_BASE_DEPAYLOAD (rtppcmadepay);
- gst_pad_use_fixed_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload));
+ gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
}
static gboolean
-gst_rtp_pcma_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_pcma_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstCaps *srccaps;
GstStructure *structure;
srccaps = gst_caps_new_simple ("audio/x-alaw",
"channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, clock_rate, NULL);
- ret = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
return ret;
}
static GstBuffer *
-gst_rtp_pcma_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_pcma_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf = NULL;
gboolean marker;
guint len;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
- GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- len = gst_rtp_buffer_get_payload_len (buf);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf) {
GST_BUFFER_DURATION (outbuf) =
}
}
+
return outbuf;
}
#define __GST_RTP_PCMA_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpPcmaDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
};
struct _GstRtpPcmaDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_pcma_depay_get_type (void);
"clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"PCMA\"")
);
-static gboolean gst_rtp_pcma_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_pcma_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmaPay, gst_rtp_pcma_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
+#define gst_rtp_pcma_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmaPay, gst_rtp_pcma_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
static void
-gst_rtp_pcma_pay_base_init (gpointer klass)
+gst_rtp_pcma_pay_class_init (GstRtpPcmaPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_pcma_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_pcma_pay_src_template));
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_pcma_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_pcma_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP PCMA payloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP PCMA payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes PCMA audio into a RTP packet",
"Edgard Lima <edgard.lima@indt.org.br>");
-}
-
-static void
-gst_rtp_pcma_pay_class_init (GstRtpPcmaPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
- gstbasertppayload_class->set_caps = gst_rtp_pcma_pay_setcaps;
+ gstrtpbasepayload_class->set_caps = gst_rtp_pcma_pay_setcaps;
}
static void
-gst_rtp_pcma_pay_init (GstRtpPcmaPay * rtppcmapay, GstRtpPcmaPayClass * klass)
+gst_rtp_pcma_pay_init (GstRtpPcmaPay * rtppcmapay)
{
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (rtppcmapay);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtppcmapay);
- GST_BASE_RTP_PAYLOAD (rtppcmapay)->clock_rate = 8000;
+ GST_RTP_BASE_PAYLOAD (rtppcmapay)->clock_rate = 8000;
- /* tell basertpaudiopayload that this is a sample based codec */
- gst_base_rtp_audio_payload_set_sample_based (basertpaudiopayload);
+ /* tell rtpbaseaudiopayload that this is a sample based codec */
+ gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
/* octet-per-sample is 1 for PCM */
- gst_base_rtp_audio_payload_set_sample_options (basertpaudiopayload, 1);
+ gst_rtp_base_audio_payload_set_sample_options (rtpbaseaudiopayload, 1);
}
static gboolean
-gst_rtp_pcma_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_pcma_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
payload->pt = GST_RTP_PAYLOAD_PCMA;
- gst_basertppayload_set_options (payload, "audio", FALSE, "PCMA", 8000);
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE, "PCMA", 8000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
return res;
}
#define __GST_RTP_PCMA_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpaudiopayload.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
G_BEGIN_DECLS
struct _GstRtpPcmaPay
{
- GstBaseRTPAudioPayload audiopayload;
+ GstRTPBaseAudioPayload audiopayload;
};
struct _GstRtpPcmaPayClass
{
- GstBaseRTPAudioPayloadClass parent_class;
+ GstRTPBaseAudioPayloadClass parent_class;
};
GType gst_rtp_pcma_pay_get_type (void);
"channels = (int) 1, rate = (int) [1, MAX ]")
);
-static GstBuffer *gst_rtp_pcmu_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_pcmu_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_pcmu_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_pcmu_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmuDepay, gst_rtp_pcmu_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
-
-static void
-gst_rtp_pcmu_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_pcmu_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_pcmu_depay_sink_template);
- gst_element_class_set_details_simple (element_class, "RTP PCMU depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts PCMU audio from RTP packets",
- "Edgard Lima <edgard.lima@indt.org.br>, Zeeshan Ali <zeenix@gmail.com>");
-}
+#define gst_rtp_pcmu_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmuDepay, gst_rtp_pcmu_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
gst_rtp_pcmu_depay_class_init (GstRtpPcmuDepayClass * klass)
{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gstbasertpdepayload_class->process = gst_rtp_pcmu_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_pcmu_depay_setcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_pcmu_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_pcmu_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP PCMU depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts PCMU audio from RTP packets",
+ "Edgard Lima <edgard.lima@indt.org.br>, Zeeshan Ali <zeenix@gmail.com>");
+
+ gstrtpbasedepayload_class->process = gst_rtp_pcmu_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_pcmu_depay_setcaps;
}
static void
-gst_rtp_pcmu_depay_init (GstRtpPcmuDepay * rtppcmudepay,
- GstRtpPcmuDepayClass * klass)
+gst_rtp_pcmu_depay_init (GstRtpPcmuDepay * rtppcmudepay)
{
- GstBaseRTPDepayload *depayload;
+ GstRTPBaseDepayload *depayload;
- depayload = GST_BASE_RTP_DEPAYLOAD (rtppcmudepay);
+ depayload = GST_RTP_BASE_DEPAYLOAD (rtppcmudepay);
- gst_pad_use_fixed_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload));
+ gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
}
static gboolean
-gst_rtp_pcmu_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_pcmu_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstCaps *srccaps;
GstStructure *structure;
srccaps = gst_caps_new_simple ("audio/x-mulaw",
"channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, clock_rate, NULL);
- ret = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
return ret;
}
static GstBuffer *
-gst_rtp_pcmu_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_pcmu_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf = NULL;
guint len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
- GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- len = gst_rtp_buffer_get_payload_len (buf);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf) {
GST_BUFFER_DURATION (outbuf) =
#define __GST_RTP_PCMU_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpPcmuDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
};
struct _GstRtpPcmuDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_pcmu_depay_get_type (void);
"clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"PCMU\"")
);
-static gboolean gst_rtp_pcmu_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_pcmu_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmuPay, gst_rtp_pcmu_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
+#define gst_rtp_pcmu_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmuPay, gst_rtp_pcmu_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
static void
-gst_rtp_pcmu_pay_base_init (gpointer klass)
+gst_rtp_pcmu_pay_class_init (GstRtpPcmuPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_pcmu_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_pcmu_pay_src_template));
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_pcmu_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_pcmu_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP PCMU payloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP PCMU payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes PCMU audio into a RTP packet",
"Edgard Lima <edgard.lima@indt.org.br>");
-}
-
-static void
-gst_rtp_pcmu_pay_class_init (GstRtpPcmuPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
- gstbasertppayload_class->set_caps = gst_rtp_pcmu_pay_setcaps;
+ gstrtpbasepayload_class->set_caps = gst_rtp_pcmu_pay_setcaps;
}
static void
-gst_rtp_pcmu_pay_init (GstRtpPcmuPay * rtppcmupay, GstRtpPcmuPayClass * klass)
+gst_rtp_pcmu_pay_init (GstRtpPcmuPay * rtppcmupay)
{
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (rtppcmupay);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtppcmupay);
- GST_BASE_RTP_PAYLOAD (rtppcmupay)->clock_rate = 8000;
+ GST_RTP_BASE_PAYLOAD (rtppcmupay)->clock_rate = 8000;
- /* tell basertpaudiopayload that this is a sample based codec */
- gst_base_rtp_audio_payload_set_sample_based (basertpaudiopayload);
+ /* tell rtpbaseaudiopayload that this is a sample based codec */
+ gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
/* octet-per-sample is 1 for PCM */
- gst_base_rtp_audio_payload_set_sample_options (basertpaudiopayload, 1);
+ gst_rtp_base_audio_payload_set_sample_options (rtpbaseaudiopayload, 1);
}
static gboolean
-gst_rtp_pcmu_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_pcmu_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
payload->pt = GST_RTP_PAYLOAD_PCMU;
- gst_basertppayload_set_options (payload, "audio", FALSE, "PCMU", 8000);
- res = gst_basertppayload_set_outcaps (payload, NULL);
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE, "PCMU", 8000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
return res;
}
#define __GST_RTP_PCMU_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpaudiopayload.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
G_BEGIN_DECLS
struct _GstRtpPcmuPay
{
- GstBaseRTPAudioPayload audiopayload;
+ GstRTPBaseAudioPayload audiopayload;
};
struct _GstRtpPcmuPayClass
{
- GstBaseRTPAudioPayloadClass parent_class;
+ GstRTPBaseAudioPayloadClass parent_class;
};
GType gst_rtp_pcmu_pay_get_type (void);
static void gst_rtp_qcelp_depay_finalize (GObject * object);
-static gboolean gst_rtp_qcelp_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_qcelp_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_qcelp_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_qcelp_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpQCELPDepay, gst_rtp_qcelp_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
-
-static void
-gst_rtp_qcelp_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_qcelp_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_qcelp_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP QCELP depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts QCELP (PureVoice) audio from RTP packets (RFC 2658)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_qcelp_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpQCELPDepay, gst_rtp_qcelp_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
gst_rtp_qcelp_depay_class_init (GstRtpQCELPDepayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_qcelp_depay_finalize;
- gstbasertpdepayload_class->process = gst_rtp_qcelp_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_qcelp_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_qcelp_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_qcelp_depay_setcaps;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_qcelp_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_qcelp_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP QCELP depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts QCELP (PureVoice) audio from RTP packets (RFC 2658)",
+ "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpqcelpdepay_debug, "rtpqcelpdepay", 0,
"QCELP RTP Depayloader");
}
static void
-gst_rtp_qcelp_depay_init (GstRtpQCELPDepay * rtpqcelpdepay,
- GstRtpQCELPDepayClass * klass)
+gst_rtp_qcelp_depay_init (GstRtpQCELPDepay * rtpqcelpdepay)
{
- GstBaseRTPDepayload G_GNUC_UNUSED *depayload;
+ GstRTPBaseDepayload G_GNUC_UNUSED *depayload;
- depayload = GST_BASE_RTP_DEPAYLOAD (rtpqcelpdepay);
+ depayload = GST_RTP_BASE_DEPAYLOAD (rtpqcelpdepay);
}
static void
static gboolean
-gst_rtp_qcelp_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_qcelp_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstCaps *srccaps;
gboolean res;
srccaps = gst_caps_new_simple ("audio/qcelp",
"channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, 8000, NULL);
- res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
return res;
outbuf = g_ptr_array_index (depay->packets, i);
g_ptr_array_index (depay->packets, i) = NULL;
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (depay), outbuf);
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (depay), outbuf);
}
/* and reset interleaving state */
create_erasure_buffer (GstRtpQCELPDepay * depay)
{
GstBuffer *outbuf;
+ GstMapInfo map;
outbuf = gst_buffer_new_and_alloc (1);
- GST_BUFFER_DATA (outbuf)[0] = 14;
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ map.data[0] = 14;
+ gst_buffer_unmap (outbuf, &map);
return outbuf;
}
static GstBuffer *
-gst_rtp_qcelp_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_qcelp_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpQCELPDepay *depay;
GstBuffer *outbuf;
guint payload_len, offset, index;
guint8 *payload;
guint LLL, NNN;
+ GstRTPBuffer rtp = { NULL };
depay = GST_RTP_QCELP_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 2)
goto too_small;
timestamp = GST_BUFFER_TIMESTAMP (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* 0 1 2 3 4 5 6 7
* +-+-+-+-+-+-+-+-+
outbuf = create_erasure_buffer (depay);
} else {
/* each frame goes into its buffer */
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, offset, frame_len);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, offset, frame_len);
}
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
if (!depay->interleaved || index == 0) {
/* not interleaved or first frame in packet, just push */
- gst_base_rtp_depayload_push (depayload, outbuf);
+ gst_rtp_base_depayload_push (depayload, outbuf);
if (timestamp != -1)
timestamp += FRAME_DURATION;
flush_packets (depay);
}
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
/* ERRORS */
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP payload too small (%d)", payload_len));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_lll:
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP invalid LLL received (%d)", LLL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_nnn:
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP invalid NNN received (%d)", NNN));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_frame:
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP invalid frame received"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#define __GST_RTP_QCELP_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpQCELPDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
gboolean interleaved;
guint bundling;
struct _GstRtpQCELPDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_qcelp_depay_get_type (void);
"encoding-name = (string)\"X-QDM\"")
);
-GST_BOILERPLATE (GstRtpQDM2Depay, gst_rtp_qdm2_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_qdm2_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpQDM2Depay, gst_rtp_qdm2_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static const guint8 headheader[20] = {
0x0, 0x0, 0x0, 0xc, 0x66, 0x72, 0x6d, 0x61,
static GstStateChangeReturn gst_rtp_qdm2_depay_change_state (GstElement *
element, GstStateChange transition);
-static GstBuffer *gst_rtp_qdm2_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_qdm2_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-gboolean gst_rtp_qdm2_depay_setcaps (GstBaseRTPDepayload * filter,
+gboolean gst_rtp_qdm2_depay_setcaps (GstRTPBaseDepayload * filter,
GstCaps * caps);
static void
-gst_rtp_qdm2_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_qdm2_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_qdm2_depay_sink_template);
-
-
- gst_element_class_set_details_simple (element_class, "RTP QDM2 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts QDM2 audio from RTP packets (no RFC)",
- "Edward Hervey <bilboed@bilboed.com>");
-}
-
-static void
gst_rtp_qdm2_depay_class_init (GstRtpQDM2DepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gstbasertpdepayload_class->process = gst_rtp_qdm2_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_qdm2_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_qdm2_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_qdm2_depay_setcaps;
gobject_class->finalize = gst_rtp_qdm2_depay_finalize;
gstelement_class->change_state = gst_rtp_qdm2_depay_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_qdm2_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_qdm2_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP QDM2 depayloader",
+ "Codec/Depayloader/Network/RTP",
+ "Extracts QDM2 audio from RTP packets (no RFC)",
+ "Edward Hervey <bilboed@bilboed.com>");
}
static void
-gst_rtp_qdm2_depay_init (GstRtpQDM2Depay * rtpqdm2depay,
- GstRtpQDM2DepayClass * klass)
+gst_rtp_qdm2_depay_init (GstRtpQDM2Depay * rtpqdm2depay)
{
rtpqdm2depay->adapter = gst_adapter_new ();
}
// only on the sink
gboolean
-gst_rtp_qdm2_depay_setcaps (GstBaseRTPDepayload * filter, GstCaps * caps)
+gst_rtp_qdm2_depay_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps)
{
GstStructure *structure = gst_caps_get_structure (caps, 0);
gint clock_rate;
GST_MEMDUMP ("Extracted packet", data, depay->packetsize);
buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = data;
- GST_BUFFER_MALLOCDATA (buf) = data;
- GST_BUFFER_SIZE (buf) = depay->packetsize;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, data, g_free, depay->packetsize, 0,
+ depay->packetsize));
gst_adapter_push (depay->adapter, buf);
}
static GstBuffer *
-gst_rtp_qdm2_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_qdm2_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpQDM2Depay *rtpqdm2depay;
- GstBuffer *outbuf;
+ GstBuffer *outbuf = NULL;
guint16 seq;
+ GstRTPBuffer rtp = { NULL };
rtpqdm2depay = GST_RTP_QDM2_DEPAY (depayload);
guint avail;
guint pos = 0;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 3)
goto bad_packet;
- payload = gst_rtp_buffer_get_payload (buf);
- seq = gst_rtp_buffer_get_seq (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ seq = gst_rtp_buffer_get_seq (&rtp);
if (G_UNLIKELY (seq != rtpqdm2depay->nextseq)) {
GST_DEBUG ("GAP in sequence number, Resetting data !");
/* Flush previous data */
if (G_UNLIKELY (!rtpqdm2depay->configured)) {
guint8 *ourdata;
GstBuffer *codecdata;
+ GstMapInfo cmap;
GstCaps *caps;
/* First bytes are unknown */
/* Caps */
codecdata = gst_buffer_new_and_alloc (48);
- memcpy (GST_BUFFER_DATA (codecdata), headheader, 20);
- memcpy (GST_BUFFER_DATA (codecdata) + 20, ourdata, 28);
+ gst_buffer_map (codecdata, &cmap, GST_MAP_WRITE);
+ memcpy (cmap.data, headheader, 20);
+ memcpy (cmap.data + 20, ourdata, 28);
+ gst_buffer_unmap (codecdata, &cmap);
caps = gst_caps_new_simple ("audio/x-qdm2",
"samplesize", G_TYPE_INT, 16,
"rate", G_TYPE_INT, rtpqdm2depay->samplerate,
"channels", G_TYPE_INT, rtpqdm2depay->channs,
"codec_data", GST_TYPE_BUFFER, codecdata, NULL);
- gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), caps);
+ gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), caps);
gst_caps_unref (caps);
rtpqdm2depay->configured = TRUE;
} else {
GST_BUFFER_TIMESTAMP (outbuf) = rtpqdm2depay->ptimestamp;
GST_DEBUG ("Outgoing buffer timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (rtpqdm2depay->ptimestamp));
- return outbuf;
}
}
- return NULL;
+
+ gst_rtp_buffer_unmap (&rtp);
+ return outbuf;
/* ERRORS */
bad_packet:
{
GST_ELEMENT_WARNING (rtpqdm2depay, STREAM, DECODE,
(NULL), ("Packet was too short"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpQDM2Depay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstAdapter *adapter;
struct _GstRtpQDM2DepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_qdm2_depay_get_type (void);
GST_STATIC_CAPS ("audio/x-siren, " "dct-length = (int) 320")
);
-static GstBuffer *gst_rtp_siren_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_siren_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_siren_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_siren_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPSirenDepay, gst_rtp_siren_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+G_DEFINE_TYPE (GstRTPSirenDepay, gst_rtp_siren_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
-gst_rtp_siren_depay_base_init (gpointer klass)
+gst_rtp_siren_depay_class_init (GstRTPSirenDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gstrtpbasedepayload_class->process = gst_rtp_siren_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_siren_depay_setcaps;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_siren_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_siren_depay_sink_template);
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_siren_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_siren_depay_sink_template));
+ gst_element_class_set_details_simple (gstelement_class,
"RTP Siren packet depayloader", "Codec/Depayloader/Network/RTP",
"Extracts Siren audio from RTP packets",
"Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
}
static void
-gst_rtp_siren_depay_class_init (GstRTPSirenDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
- gstbasertpdepayload_class->process = gst_rtp_siren_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_siren_depay_setcaps;
-}
-
-static void
-gst_rtp_siren_depay_init (GstRTPSirenDepay * rtpsirendepay,
- GstRTPSirenDepayClass * klass)
+gst_rtp_siren_depay_init (GstRTPSirenDepay * rtpsirendepay)
{
}
static gboolean
-gst_rtp_siren_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_siren_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstCaps *srccaps;
gboolean ret;
srccaps = gst_caps_new_simple ("audio/x-siren",
"dct-length", G_TYPE_INT, 320, NULL);
- ret = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
GST_DEBUG ("set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret);
gst_caps_unref (srccaps);
}
static GstBuffer *
-gst_rtp_siren_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_siren_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
return outbuf;
}
#define __GST_RTP_SIREN_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS typedef struct _GstRTPSirenDepay GstRTPSirenDepay;
typedef struct _GstRTPSirenDepayClass GstRTPSirenDepayClass;
struct _GstRTPSirenDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
};
struct _GstRTPSirenDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_siren_depay_get_type (void);
"bitrate = (string) \"16000\", " "dct-length = (int) 320")
);
-static gboolean gst_rtp_siren_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_siren_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPSirenPay, gst_rtp_siren_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
+G_DEFINE_TYPE (GstRTPSirenPay, gst_rtp_siren_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
static void
-gst_rtp_siren_pay_base_init (gpointer klass)
+gst_rtp_siren_pay_class_init (GstRTPSirenPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_siren_pay_setcaps;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_siren_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_siren_pay_src_template);
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_siren_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_siren_pay_src_template));
+ gst_element_class_set_details_simple (gstelement_class,
"RTP Payloader for Siren Audio", "Codec/Payloader/Network/RTP",
"Packetize Siren audio streams into RTP packets",
"Youness Alaoui <kakaroto@kakaroto.homelinux.net>");
-}
-
-static void
-gst_rtp_siren_pay_class_init (GstRTPSirenPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
-
- gstbasertppayload_class->set_caps = gst_rtp_siren_pay_setcaps;
GST_DEBUG_CATEGORY_INIT (rtpsirenpay_debug, "rtpsirenpay", 0,
"siren audio RTP payloader");
}
static void
-gst_rtp_siren_pay_init (GstRTPSirenPay * rtpsirenpay,
- GstRTPSirenPayClass * klass)
+gst_rtp_siren_pay_init (GstRTPSirenPay * rtpsirenpay)
{
- GstBaseRTPPayload *basertppayload;
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBasePayload *rtpbasepayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
- basertppayload = GST_BASE_RTP_PAYLOAD (rtpsirenpay);
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (rtpsirenpay);
+ rtpbasepayload = GST_RTP_BASE_PAYLOAD (rtpsirenpay);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpsirenpay);
/* we don't set the payload type, it should be set by the application using
* the pt property or the default 96 will be used */
- basertppayload->clock_rate = 16000;
+ rtpbasepayload->clock_rate = 16000;
- /* tell basertpaudiopayload that this is a frame based codec */
- gst_base_rtp_audio_payload_set_frame_based (basertpaudiopayload);
+ /* tell rtpbaseaudiopayload that this is a frame based codec */
+ gst_rtp_base_audio_payload_set_frame_based (rtpbaseaudiopayload);
}
static gboolean
-gst_rtp_siren_pay_setcaps (GstBaseRTPPayload * basertppayload, GstCaps * caps)
+gst_rtp_siren_pay_setcaps (GstRTPBasePayload * rtpbasepayload, GstCaps * caps)
{
GstRTPSirenPay *rtpsirenpay;
- GstBaseRTPAudioPayload *basertpaudiopayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
gint dct_length;
GstStructure *structure;
const char *payload_name;
- rtpsirenpay = GST_RTP_SIREN_PAY (basertppayload);
- basertpaudiopayload = GST_BASE_RTP_AUDIO_PAYLOAD (basertppayload);
+ rtpsirenpay = GST_RTP_SIREN_PAY (rtpbasepayload);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpbasepayload);
structure = gst_caps_get_structure (caps, 0);
if (g_ascii_strcasecmp ("audio/x-siren", payload_name))
goto wrong_caps;
- gst_basertppayload_set_options (basertppayload, "audio", TRUE, "SIREN",
+ gst_rtp_base_payload_set_options (rtpbasepayload, "audio", TRUE, "SIREN",
16000);
/* set options for this frame based audio codec */
- gst_base_rtp_audio_payload_set_frame_options (basertpaudiopayload, 20, 40);
+ gst_rtp_base_audio_payload_set_frame_options (rtpbaseaudiopayload, 20, 40);
- return gst_basertppayload_set_outcaps (basertppayload, NULL);
+ return gst_rtp_base_payload_set_outcaps (rtpbasepayload, NULL);
/* ERRORS */
wrong_dct:
#define __GST_RTP_SIREN_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpaudiopayload.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
G_BEGIN_DECLS
#define GST_TYPE_RTP_SIREN_PAY \
struct _GstRTPSirenPay
{
- GstBaseRTPAudioPayload audiopayload;
+ GstRTPBaseAudioPayload audiopayload;
};
struct _GstRTPSirenPayClass
{
- GstBaseRTPAudioPayloadClass parent_class;
+ GstRTPBaseAudioPayloadClass parent_class;
};
GType gst_rtp_siren_pay_get_type (void);
GST_STATIC_CAPS ("audio/x-speex")
);
-static GstBuffer *gst_rtp_speex_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_speex_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_speex_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_speex_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpSPEEXDepay, gst_rtp_speex_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
-
-static void
-gst_rtp_speex_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_speex_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_speex_depay_sink_template);
- gst_element_class_set_details_simple (element_class, "RTP Speex depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts Speex audio from RTP packets",
- "Edgard Lima <edgard.lima@indt.org.br>");
-}
+G_DEFINE_TYPE (GstRtpSPEEXDepay, gst_rtp_speex_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
gst_rtp_speex_depay_class_init (GstRtpSPEEXDepayClass * klass)
{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class->process = gst_rtp_speex_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_speex_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_speex_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_speex_depay_setcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_speex_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_speex_depay_sink_template));
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Speex depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Speex audio from RTP packets",
+ "Edgard Lima <edgard.lima@indt.org.br>");
}
static void
-gst_rtp_speex_depay_init (GstRtpSPEEXDepay * rtpspeexdepay,
- GstRtpSPEEXDepayClass * klass)
+gst_rtp_speex_depay_init (GstRtpSPEEXDepay * rtpspeexdepay)
{
}
"\045\0\0\0Depayloaded with GStreamer speexdepay\0\0\0\0";
static gboolean
-gst_rtp_speex_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_speex_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRtpSPEEXDepay *rtpspeexdepay;
gint clock_rate, nb_channels;
GstBuffer *buf;
+ GstMapInfo map;
guint8 *data;
const gchar *params;
GstCaps *srccaps;
/* construct minimal header and comment packet for the decoder */
buf = gst_buffer_new_and_alloc (80);
- data = GST_BUFFER_DATA (buf);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = map.data;
memcpy (data, "Speex ", 8);
data += 8;
memcpy (data, "1.1.12", 7);
GST_WRITE_UINT32_LE (data, 0); /* reserved1 */
data += 4;
GST_WRITE_UINT32_LE (data, 0); /* reserved2 */
+ gst_buffer_unmap (buf, &map);
- srccaps = gst_caps_new_simple ("audio/x-speex", NULL);
+ srccaps = gst_caps_new_empty_simple ("audio/x-speex");
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpspeexdepay), buf);
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpspeexdepay), buf);
buf = gst_buffer_new_and_alloc (sizeof (gst_rtp_speex_comment));
- memcpy (GST_BUFFER_DATA (buf), gst_rtp_speex_comment,
+ gst_buffer_fill (buf, 0, gst_rtp_speex_comment,
sizeof (gst_rtp_speex_comment));
- gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpspeexdepay), buf);
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpspeexdepay), buf);
return res;
}
static GstBuffer *
-gst_rtp_speex_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_speex_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf = NULL;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf),
- gst_rtp_buffer_get_marker (buf),
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (buf),
+ gst_rtp_buffer_get_marker (&rtp),
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
/* nothing special to be done */
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf)
GST_BUFFER_DURATION (outbuf) = 20 * GST_MSECOND;
#define __GST_RTP_SPEEX_DEPAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpSPEEXDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
};
struct _GstRtpSPEEXDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_speex_depay_get_type (void);
static GstStateChangeReturn gst_rtp_speex_pay_change_state (GstElement *
element, GstStateChange transition);
-static gboolean gst_rtp_speex_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_speex_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstCaps *gst_rtp_speex_pay_getcaps (GstBaseRTPPayload * payload,
- GstPad * pad);
-static GstFlowReturn gst_rtp_speex_pay_handle_buffer (GstBaseRTPPayload *
+static GstCaps *gst_rtp_speex_pay_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn gst_rtp_speex_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpSPEEXPay, gst_rtp_speex_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_speex_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_speex_pay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_speex_pay_src_template);
- gst_element_class_set_details_simple (element_class, "RTP Speex payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes Speex audio into a RTP packet",
- "Edgard Lima <edgard.lima@indt.org.br>");
-
- GST_DEBUG_CATEGORY_INIT (rtpspeexpay_debug, "rtpspeexpay", 0,
- "Speex RTP Payloader");
-}
+#define gst_rtp_speex_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSPEEXPay, gst_rtp_speex_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_speex_pay_class_init (GstRtpSPEEXPayClass * klass)
{
GstElementClass *gstelement_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gstelement_class = (GstElementClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gstelement_class->change_state = gst_rtp_speex_pay_change_state;
- gstbasertppayload_class->set_caps = gst_rtp_speex_pay_setcaps;
- gstbasertppayload_class->get_caps = gst_rtp_speex_pay_getcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_speex_pay_handle_buffer;
+ gstrtpbasepayload_class->set_caps = gst_rtp_speex_pay_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_speex_pay_getcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_speex_pay_handle_buffer;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_speex_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_speex_pay_src_template));
+ gst_element_class_set_details_simple (gstelement_class, "RTP Speex payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes Speex audio into a RTP packet",
+ "Edgard Lima <edgard.lima@indt.org.br>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpspeexpay_debug, "rtpspeexpay", 0,
+ "Speex RTP Payloader");
}
static void
-gst_rtp_speex_pay_init (GstRtpSPEEXPay * rtpspeexpay,
- GstRtpSPEEXPayClass * klass)
+gst_rtp_speex_pay_init (GstRtpSPEEXPay * rtpspeexpay)
{
- GST_BASE_RTP_PAYLOAD (rtpspeexpay)->clock_rate = 8000;
- GST_BASE_RTP_PAYLOAD_PT (rtpspeexpay) = 110; /* Create String */
+ GST_RTP_BASE_PAYLOAD (rtpspeexpay)->clock_rate = 8000;
+ GST_RTP_BASE_PAYLOAD_PT (rtpspeexpay) = 110; /* Create String */
}
static gboolean
-gst_rtp_speex_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_speex_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
/* don't configure yet, we wait for the ident packet */
return TRUE;
static GstCaps *
-gst_rtp_speex_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
+gst_rtp_speex_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
gst_caps_unref (otherpadcaps);
}
+ if (filter) {
+ GstCaps *tcaps = caps;
+
+ caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tcaps);
+ }
+
return caps;
}
const guint8 * data, guint size)
{
guint32 version, header_size, rate, mode, nb_channels;
- GstBaseRTPPayload *payload;
+ GstRTPBasePayload *payload;
gchar *cstr;
gboolean res;
GST_DEBUG_OBJECT (rtpspeexpay, "rate %d, mode %d, nb_channels %d",
rate, mode, nb_channels);
- payload = GST_BASE_RTP_PAYLOAD (rtpspeexpay);
+ payload = GST_RTP_BASE_PAYLOAD (rtpspeexpay);
- gst_basertppayload_set_options (payload, "audio", FALSE, "SPEEX", rate);
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE, "SPEEX", rate);
cstr = g_strdup_printf ("%d", nb_channels);
- res = gst_basertppayload_set_outcaps (payload, "encoding-params",
+ res = gst_rtp_base_payload_set_outcaps (payload, "encoding-params",
G_TYPE_STRING, cstr, NULL);
g_free (cstr);
}
static GstFlowReturn
-gst_rtp_speex_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_speex_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpSPEEXPay *rtpspeexpay;
- guint size, payload_len;
+ guint payload_len;
+ GstMapInfo map;
GstBuffer *outbuf;
- guint8 *payload, *data;
+ guint8 *payload;
GstClockTime timestamp, duration;
GstFlowReturn ret;
+ GstRTPBuffer rtp = { NULL };
rtpspeexpay = GST_RTP_SPEEX_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
switch (rtpspeexpay->packet) {
case 0:
/* ident packet. We need to parse the headers to construct the RTP
* properties. */
- if (!gst_rtp_speex_pay_parse_ident (rtpspeexpay, data, size))
+ if (!gst_rtp_speex_pay_parse_ident (rtpspeexpay, map.data, map.size))
goto parse_error;
ret = GST_FLOW_OK;
duration = GST_BUFFER_DURATION (buffer);
/* FIXME, only one SPEEX frame per RTP packet for now */
- payload_len = size;
+ payload_len = map.size;
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
/* FIXME, assert for now */
- g_assert (payload_len <= GST_BASE_RTP_PAYLOAD_MTU (rtpspeexpay));
+ g_assert (payload_len <= GST_RTP_BASE_PAYLOAD_MTU (rtpspeexpay));
/* copy timestamp and duration */
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_DURATION (outbuf) = duration;
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* get payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* copy data in payload */
- memcpy (&payload[0], data, size);
+ memcpy (&payload[0], map.data, map.size);
+
+ gst_rtp_buffer_unmap (&rtp);
- ret = gst_basertppayload_push (basepayload, outbuf);
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
done:
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
rtpspeexpay->packet++;
{
GST_ELEMENT_ERROR (rtpspeexpay, STREAM, DECODE, (NULL),
("Error parsing first identification packet."));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
#define __GST_RTP_SPEEX_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
G_BEGIN_DECLS
struct _GstRtpSPEEXPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
guint64 packet;
};
struct _GstRtpSPEEXPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_speex_pay_get_type (void);
"encoding-name = (string) { \"X-SV3V-ES\", \"X-SORENSON-VIDEO\" , \"X-SORENSONVIDEO\" , \"X-SorensonVideo\" }")
);
-GST_BOILERPLATE (GstRtpSV3VDepay, gst_rtp_sv3v_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_sv3v_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSV3VDepay, gst_rtp_sv3v_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
static void gst_rtp_sv3v_depay_finalize (GObject * object);
static GstStateChangeReturn gst_rtp_sv3v_depay_change_state (GstElement *
element, GstStateChange transition);
-static GstBuffer *gst_rtp_sv3v_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_sv3v_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-gboolean gst_rtp_sv3v_depay_setcaps (GstBaseRTPDepayload * filter,
+gboolean gst_rtp_sv3v_depay_setcaps (GstRTPBaseDepayload * filter,
GstCaps * caps);
static void
-gst_rtp_sv3v_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_sv3v_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_sv3v_depay_sink_template);
-
-
- gst_element_class_set_details_simple (element_class, "RTP SVQ3 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts SVQ3 video from RTP packets (no RFC)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_sv3v_depay_class_init (GstRtpSV3VDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
- gstbasertpdepayload_class->process = gst_rtp_sv3v_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_sv3v_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_sv3v_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_sv3v_depay_setcaps;
gobject_class->finalize = gst_rtp_sv3v_depay_finalize;
gstelement_class->change_state = gst_rtp_sv3v_depay_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_sv3v_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_sv3v_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP SVQ3 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts SVQ3 video from RTP packets (no RFC)",
+ "Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_rtp_sv3v_depay_init (GstRtpSV3VDepay * rtpsv3vdepay,
- GstRtpSV3VDepayClass * klass)
+gst_rtp_sv3v_depay_init (GstRtpSV3VDepay * rtpsv3vdepay)
{
rtpsv3vdepay->adapter = gst_adapter_new ();
}
// only on the sink
gboolean
-gst_rtp_sv3v_depay_setcaps (GstBaseRTPDepayload * filter, GstCaps * caps)
+gst_rtp_sv3v_depay_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps)
{
GstStructure *structure = gst_caps_get_structure (caps, 0);
gint clock_rate;
}
static GstBuffer *
-gst_rtp_sv3v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_sv3v_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpSV3VDepay *rtpsv3vdepay;
static struct
gboolean C, S, E;
GstBuffer *outbuf = NULL;
guint16 seq;
+ GstRTPBuffer rtp = { NULL };
rtpsv3vdepay = GST_RTP_SV3V_DEPAY (depayload);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
/* flush on sequence number gaps */
- seq = gst_rtp_buffer_get_seq (buf);
+ seq = gst_rtp_buffer_get_seq (&rtp);
GST_DEBUG ("timestamp %" GST_TIME_FORMAT ", sequence number:%d",
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), seq);
}
rtpsv3vdepay->nextseq = seq + 1;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 3)
goto bad_packet;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
- M = gst_rtp_buffer_get_marker (buf);
+ M = gst_rtp_buffer_get_marker (&rtp);
/* This is all a guess:
* 1 1 1 1 1 1
if (G_UNLIKELY (C)) {
GstCaps *caps;
GstBuffer *codec_data;
+ GstMapInfo cmap;
guint8 res;
GST_DEBUG ("Configuration packet");
/* if we already have caps, we don't need to do anything. FIXME, check if
* something changed. */
- if (G_UNLIKELY (GST_PAD_CAPS (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload)))) {
+ if (G_UNLIKELY (gst_pad_has_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD
+ (depayload)))) {
GST_DEBUG ("Already configured, skipping config parsing");
goto beach;
}
/* CodecData needs to be 'SEQH' + len (32bit) + data according to
* ffmpeg's libavcodec/svq3.c:svq3_decode_init */
codec_data = gst_buffer_new_and_alloc (payload_len + 6);
- memcpy (GST_BUFFER_DATA (codec_data), "SEQH", 4);
- GST_WRITE_UINT32_LE (GST_BUFFER_DATA (codec_data) + 4, payload_len - 2);
- memcpy (GST_BUFFER_DATA (codec_data) + 8, payload + 2, payload_len - 2);
-
- GST_MEMDUMP ("codec_data", GST_BUFFER_DATA (codec_data),
- GST_BUFFER_SIZE (codec_data));
+ gst_buffer_map (codec_data, &cmap, GST_MAP_WRITE);
+ memcpy (cmap.data, "SEQH", 4);
+ GST_WRITE_UINT32_LE (cmap.data + 4, payload_len - 2);
+ memcpy (cmap.data + 8, payload + 2, payload_len - 2);
+ GST_MEMDUMP ("codec_data", cmap.data, gst_buffer_get_size (codec_data));
+ gst_buffer_unmap (codec_data, &cmap);
caps = gst_caps_new_simple ("video/x-svq",
"svqversion", G_TYPE_INT, 3,
"width", G_TYPE_INT, rtpsv3vdepay->width,
"height", G_TYPE_INT, rtpsv3vdepay->height,
"codec_data", GST_TYPE_BUFFER, codec_data, NULL);
- gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), caps);
+ gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), caps);
gst_caps_unref (caps);
GST_DEBUG ("Depayloader now configured");
GST_DEBUG ("Storing incoming payload");
/* store data in adapter, stip off 2 bytes header */
- tmpbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 2, -1);
+ tmpbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 2, -1);
gst_adapter_push (rtpsv3vdepay->adapter, tmpbuf);
if (G_UNLIKELY (M)) {
}
beach:
+ gst_rtp_buffer_unmap (&rtp);
return outbuf;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpsv3vdepay, STREAM, DECODE,
(NULL), ("Packet was too short"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpSV3VDepay
{
- GstBaseRTPDepayload depayload;
+ GstRTPBaseDepayload depayload;
GstAdapter *adapter;
struct _GstRtpSV3VDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_sv3v_depay_get_type (void);
GST_STATIC_CAPS ("video/x-theora")
);
-GST_BOILERPLATE (GstRtpTheoraDepay, gst_rtp_theora_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_theora_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpTheoraDepay, gst_rtp_theora_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_theora_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_theora_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_theora_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_theora_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
-static gboolean gst_rtp_theora_depay_packet_lost (GstBaseRTPDepayload *
+static gboolean gst_rtp_theora_depay_packet_lost (GstRTPBaseDepayload *
depayload, GstEvent * event);
static void gst_rtp_theora_depay_finalize (GObject * object);
-
-static void
-gst_rtp_theora_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_theora_depay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_theora_depay_src_template);
-
- gst_element_class_set_details_simple (element_class, "RTP Theora depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts Theora video from RTP packets (draft-01 of RFC XXXX)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
static void
gst_rtp_theora_depay_class_init (GstRtpTheoraDepayClass * klass)
{
GObjectClass *gobject_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_theora_depay_finalize;
- gstbasertpdepayload_class->process = gst_rtp_theora_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_theora_depay_setcaps;
- gstbasertpdepayload_class->packet_lost = gst_rtp_theora_depay_packet_lost;
+ gstrtpbasedepayload_class->process = gst_rtp_theora_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_theora_depay_setcaps;
+ gstrtpbasedepayload_class->packet_lost = gst_rtp_theora_depay_packet_lost;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_theora_depay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_theora_depay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Theora depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Theora video from RTP packets (draft-01 of RFC XXXX)",
+ "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtptheoradepay_debug, "rtptheoradepay", 0,
"Theora RTP Depayloader");
}
static void
-gst_rtp_theora_depay_init (GstRtpTheoraDepay * rtptheoradepay,
- GstRtpTheoraDepayClass * klass)
+gst_rtp_theora_depay_init (GstRtpTheoraDepay * rtptheoradepay)
{
rtptheoradepay->adapter = gst_adapter_new ();
}
{
GstBuffer *buf;
guint32 num_headers;
+ GstMapInfo map;
guint8 *data;
- guint size;
+ gsize size;
gint i, j;
- data = GST_BUFFER_DATA (confbuf);
- size = GST_BUFFER_SIZE (confbuf);
+ gst_buffer_map (confbuf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
- GST_DEBUG_OBJECT (rtptheoradepay, "config size %u", size);
+ GST_DEBUG_OBJECT (rtptheoradepay, "config size %" G_GSIZE_FORMAT, size);
/* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Number of packed headers |
data += 6;
GST_DEBUG_OBJECT (rtptheoradepay,
- "header %d, ident 0x%08x, length %u, left %u", i, ident, length, size);
+ "header %d, ident 0x%08x, length %u, left %" G_GSIZE_FORMAT, i, ident,
+ length, size);
/* FIXME check if we already got this ident */
h_size);
buf = gst_buffer_new_and_alloc (h_size);
- memcpy (GST_BUFFER_DATA (buf), data, h_size);
+ gst_buffer_fill (buf, 0, data, h_size);
conf->headers = g_list_append (conf->headers, buf);
data += h_size;
size -= h_size;
}
rtptheoradepay->configs = g_list_append (rtptheoradepay->configs, conf);
}
+
+ gst_buffer_unmap (confbuf, &map);
return TRUE;
/* ERRORS */
too_small:
{
GST_DEBUG_OBJECT (rtptheoradepay, "configuration too small");
+ gst_buffer_unmap (confbuf, &map);
return FALSE;
}
}
guint length)
{
GstBuffer *confbuf;
- guint8 *conf;
+ GstMapInfo map;
if (G_UNLIKELY (size < 4))
return FALSE;
/* transform inline to out-of-band and parse that one */
confbuf = gst_buffer_new_and_alloc (size + 9);
- conf = GST_BUFFER_DATA (confbuf);
+ gst_buffer_map (confbuf, &map, GST_MAP_WRITE);
/* 1 header */
- GST_WRITE_UINT32_BE (conf, 1);
+ GST_WRITE_UINT32_BE (map.data, 1);
/* write Ident */
- GST_WRITE_UINT24_BE (conf + 4, ident);
+ GST_WRITE_UINT24_BE (map.data + 4, ident);
/* write sort-of-length */
- GST_WRITE_UINT16_BE (conf + 7, length);
+ GST_WRITE_UINT16_BE (map.data + 7, length);
/* copy remainder */
- memcpy (conf + 9, configuration, size);
+ memcpy (map.data + 9, configuration, size);
+ gst_buffer_unmap (confbuf, &map);
return gst_rtp_theora_depay_parse_configuration (rtptheoradepay, confbuf);
}
static gboolean
-gst_rtp_theora_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_theora_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRtpTheoraDepay *rtptheoradepay;
data = g_base64_decode (configuration, &size);
confbuf = gst_buffer_new ();
- GST_BUFFER_DATA (confbuf) = data;
- GST_BUFFER_MALLOCDATA (confbuf) = data;
- GST_BUFFER_SIZE (confbuf) = size;
+ gst_buffer_take_memory (confbuf, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
if (!gst_rtp_theora_depay_parse_configuration (rtptheoradepay, confbuf))
goto invalid_configuration;
}
/* set caps on pad and on header */
- srccaps = gst_caps_new_simple ("video/x-theora", NULL);
+ srccaps = gst_caps_new_empty_simple ("video/x-theora");
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
GstBuffer *header = GST_BUFFER_CAST (headers->data);
gst_buffer_ref (header);
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtptheoradepay),
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtptheoradepay),
header);
}
/* remember the current config */
}
static GstBuffer *
-gst_rtp_theora_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_theora_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpTheoraDepay *rtptheoradepay;
GstBuffer *outbuf;
GstFlowReturn ret;
gint payload_len;
guint8 *payload, *to_free = NULL;
- guint32 timestamp;
guint32 header, ident;
guint8 F, TDT, packets;
+ GstRTPBuffer rtp = { NULL };
rtptheoradepay = GST_RTP_THEORA_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
GST_DEBUG_OBJECT (depayload, "got RTP packet of size %d", payload_len);
if (G_UNLIKELY (payload_len < 4))
goto packet_short;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
header = GST_READ_UINT32_BE (payload);
/*
/* first assembled packet, reuse 2 bytes to store the length */
headerskip = (F == 1 ? 4 : 6);
/* skip header and length. */
- vdata = gst_rtp_buffer_get_payload_subbuffer (buf, headerskip, -1);
+ vdata = gst_rtp_buffer_get_payload_subbuffer (&rtp, headerskip, -1);
GST_DEBUG_OBJECT (depayload, "assemble theora packet");
gst_adapter_push (rtptheoradepay->adapter, vdata);
* .. theora data |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+*
*/
- timestamp = gst_rtp_buffer_get_timestamp (buf);
-
while (payload_len >= 2) {
guint16 length;
/* create buffer for packet */
if (G_UNLIKELY (to_free)) {
outbuf = gst_buffer_new ();
- GST_BUFFER_DATA (outbuf) = payload;
- GST_BUFFER_MALLOCDATA (outbuf) = to_free;
- GST_BUFFER_SIZE (outbuf) = length;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, to_free, g_free,
+ (payload - to_free) + length, payload - to_free, length));
to_free = NULL;
} else {
outbuf = gst_buffer_new_and_alloc (length);
- memcpy (GST_BUFFER_DATA (outbuf), payload, length);
+ gst_buffer_fill (outbuf, 0, payload, length);
}
if (payload_len > 0 && (payload[0] & 0xC0) == 0x0)
payload += length;
payload_len -= length;
- if (timestamp != -1)
- /* push with timestamp of the last packet, which is the same timestamp that
- * should apply to the first assembled packet. */
- ret = gst_base_rtp_depayload_push_ts (depayload, timestamp, outbuf);
- else
- ret = gst_base_rtp_depayload_push (depayload, outbuf);
-
+ ret = gst_rtp_base_depayload_push (depayload, outbuf);
if (ret != GST_FLOW_OK)
break;
-
- /* make sure we don't set a timestamp on next buffers */
- timestamp = -1;
}
g_free (to_free);
if (rtptheoradepay->needs_keyframe)
goto request_keyframe;
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
no_output:
{
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
/* ERORRS */
ignore_reserved:
{
GST_WARNING_OBJECT (rtptheoradepay, "reserved TDT ignored");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
length_short:
}
request_config:
{
- gst_pad_push_event (GST_BASE_RTP_DEPAYLOAD_SINKPAD (depayload),
+ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
gst_structure_new ("GstForceKeyUnit",
"all-headers", G_TYPE_BOOLEAN, TRUE, NULL)));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
request_keyframe:
{
rtptheoradepay->needs_keyframe = TRUE;
- gst_pad_push_event (GST_BASE_RTP_DEPAYLOAD_SINKPAD (depayload),
+ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
- gst_structure_new ("GstForceKeyUnit", NULL)));
+ gst_structure_new_empty ("GstForceKeyUnit")));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
}
static gboolean
-gst_rtp_theora_depay_packet_lost (GstBaseRTPDepayload * depayload,
+gst_rtp_theora_depay_packet_lost (GstRTPBaseDepayload * depayload,
GstEvent * event)
{
GstRtpTheoraDepay *rtptheoradepay = GST_RTP_THEORA_DEPAY (depayload);
guint seqnum = 0;
- gst_structure_get_uint (event->structure, "seqnum", &seqnum);
+ gst_structure_get_uint (gst_event_get_structure (event), "seqnum", &seqnum);
GST_LOG_OBJECT (depayload, "Requested keyframe because frame with seqnum %u"
" is missing", seqnum);
rtptheoradepay->needs_keyframe = TRUE;
- gst_pad_push_event (GST_BASE_RTP_DEPAYLOAD_SINKPAD (depayload),
+ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
- gst_structure_new ("GstForceKeyUnit", NULL)));
+ gst_structure_new_empty ("GstForceKeyUnit")));
return TRUE;
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpTheoraDepay
{
- GstBaseRTPDepayload parent;
+ GstRTPBaseDepayload parent;
GList *configs;
GstRtpTheoraConfig *config;
struct _GstRtpTheoraDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_theora_depay_get_type (void);
PROP_CONFIG_INTERVAL
};
-GST_BOILERPLATE (GstRtpTheoraPay, gst_rtp_theora_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
+#define gst_rtp_theora_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpTheoraPay, gst_rtp_theora_pay, GST_TYPE_RTP_BASE_PAYLOAD);
-static gboolean gst_rtp_theora_pay_setcaps (GstBaseRTPPayload * basepayload,
+static gboolean gst_rtp_theora_pay_setcaps (GstRTPBasePayload * basepayload,
GstCaps * caps);
static GstStateChangeReturn gst_rtp_theora_pay_change_state (GstElement *
element, GstStateChange transition);
-static GstFlowReturn gst_rtp_theora_pay_handle_buffer (GstBaseRTPPayload * pad,
+static GstFlowReturn gst_rtp_theora_pay_handle_buffer (GstRTPBasePayload * pad,
GstBuffer * buffer);
-static gboolean gst_rtp_theora_pay_handle_event (GstPad * pad,
+static gboolean gst_rtp_theora_pay_sink_event (GstRTPBasePayload * payload,
GstEvent * event);
GValue * value, GParamSpec * pspec);
static void
-gst_rtp_theora_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_theora_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_theora_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP Theora payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encode Theora video into RTP packets (draft-01 RFC XXXX)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_theora_pay_class_init (GstRtpTheoraPayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gstelement_class->change_state = gst_rtp_theora_pay_change_state;
- gstbasertppayload_class->set_caps = gst_rtp_theora_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_theora_pay_handle_buffer;
- gstbasertppayload_class->handle_event = gst_rtp_theora_pay_handle_event;
+ gstrtpbasepayload_class->set_caps = gst_rtp_theora_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_theora_pay_handle_buffer;
+ gstrtpbasepayload_class->sink_event = gst_rtp_theora_pay_sink_event;
gobject_class->set_property = gst_rtp_theora_pay_set_property;
gobject_class->get_property = gst_rtp_theora_pay_get_property;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_theora_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_theora_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Theora payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encode Theora video into RTP packets (draft-01 RFC XXXX)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtptheorapay_debug, "rtptheorapay", 0,
"Theora RTP Payloader");
}
static void
-gst_rtp_theora_pay_init (GstRtpTheoraPay * rtptheorapay,
- GstRtpTheoraPayClass * klass)
+gst_rtp_theora_pay_init (GstRtpTheoraPay * rtptheorapay)
{
rtptheorapay->last_config = GST_CLOCK_TIME_NONE;
}
}
static gboolean
-gst_rtp_theora_pay_setcaps (GstBaseRTPPayload * basepayload, GstCaps * caps)
+gst_rtp_theora_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
{
GstRtpTheoraPay *rtptheorapay;
gst_rtp_theora_pay_reset_packet (GstRtpTheoraPay * rtptheorapay, guint8 TDT)
{
guint payload_len;
+ GstRTPBuffer rtp = { NULL };
GST_DEBUG_OBJECT (rtptheorapay, "reset packet");
rtptheorapay->payload_pos = 4;
- payload_len = gst_rtp_buffer_get_payload_len (rtptheorapay->packet);
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
rtptheorapay->payload_left = payload_len - 4;
rtptheorapay->payload_duration = 0;
rtptheorapay->payload_F = 0;
/* new packet allocate max packet size */
rtptheorapay->packet =
- gst_rtp_buffer_new_allocate_len (GST_BASE_RTP_PAYLOAD_MTU
+ gst_rtp_buffer_new_allocate_len (GST_RTP_BASE_PAYLOAD_MTU
(rtptheorapay), 0, 0);
gst_rtp_theora_pay_reset_packet (rtptheorapay, TDT);
GstFlowReturn ret;
guint8 *payload;
guint hlen;
+ GstRTPBuffer rtp = { NULL };
/* check for empty packet */
if (!rtptheorapay->packet || rtptheorapay->payload_pos <= 4)
GST_DEBUG_OBJECT (rtptheorapay, "flushing packet");
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
+
/* fix header */
- payload = gst_rtp_buffer_get_payload (rtptheorapay->packet);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/*
* 0 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
(rtptheorapay->payload_TDT & 0x3) << 4 |
(rtptheorapay->payload_pkts & 0xf);
+ gst_rtp_buffer_unmap (&rtp);
+
/* shrink the buffer size to the last written byte */
hlen = gst_rtp_buffer_calc_header_len (0);
- GST_BUFFER_SIZE (rtptheorapay->packet) = hlen + rtptheorapay->payload_pos;
+ gst_buffer_resize (rtptheorapay->packet, 0, hlen + rtptheorapay->payload_pos);
GST_BUFFER_DURATION (rtptheorapay->packet) = rtptheorapay->payload_duration;
/* push, this gives away our ref to the packet, so clear it. */
ret =
- gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtptheorapay),
+ gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtptheorapay),
rtptheorapay->packet);
rtptheorapay->packet = NULL;
}
static gboolean
-gst_rtp_theora_pay_finish_headers (GstBaseRTPPayload * basepayload)
+gst_rtp_theora_pay_finish_headers (GstRTPBasePayload * basepayload)
{
GstRtpTheoraPay *rtptheorapay = GST_RTP_THEORA_PAY (basepayload);
GList *walk;
extralen = 1;
for (walk = rtptheorapay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
-
+ GstMapInfo map;
guint bsize;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
length += bsize;
n_headers++;
} while (bsize);
}
/* update hash */
- ident = fnv1_hash_32_update (ident, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ ident = fnv1_hash_32_update (ident, map.data, map.size);
+ gst_buffer_unmap (buf, &map);
}
/* packet length is header size + packet length */
if (!g_list_next (walk))
break;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
/* calc size */
size = 0;
} while (bsize);
temp = size;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
/* write the size backwards */
flag = 0;
while (size) {
/* copy header data */
for (walk = rtptheorapay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+ GstMapInfo map;
- memcpy (data, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
- data += GST_BUFFER_SIZE (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ memcpy (data, map.data, map.size);
+ gst_buffer_unmap (buf, &map);
+ data += map.size;
}
/* serialize to base64 */
/* configure payloader settings */
wstr = g_strdup_printf ("%d", rtptheorapay->width);
hstr = g_strdup_printf ("%d", rtptheorapay->height);
- gst_basertppayload_set_options (basepayload, "video", TRUE, "THEORA", 90000);
- res = gst_basertppayload_set_outcaps (basepayload,
- "sampling", G_TYPE_STRING, "YCbCr-4:2:0",
- "width", G_TYPE_STRING, wstr,
- "height", G_TYPE_STRING, hstr,
- "configuration", G_TYPE_STRING, configuration,
- "delivery-method", G_TYPE_STRING, "inline",
+ gst_rtp_base_payload_set_options (basepayload, "video", TRUE, "THEORA",
+ 90000);
+ res =
+ gst_rtp_base_payload_set_outcaps (basepayload, "sampling", G_TYPE_STRING,
+ "YCbCr-4:2:0", "width", G_TYPE_STRING, wstr, "height", G_TYPE_STRING,
+ hstr, "configuration", G_TYPE_STRING, configuration, "delivery-method",
+ G_TYPE_STRING, "inline",
/* don't set the other defaults
*/
NULL);
}
static gboolean
-gst_rtp_theora_pay_parse_id (GstBaseRTPPayload * basepayload, guint8 * data,
+gst_rtp_theora_pay_parse_id (GstRTPBasePayload * basepayload, guint8 * data,
guint size)
{
GstRtpTheoraPay *rtptheorapay;
guint plen;
guint8 *ppos, *payload;
gboolean fragmented;
+ GstRTPBuffer rtp = { NULL };
/* size increases with packet length and 2 bytes size eader. */
newduration = rtptheorapay->payload_duration;
packet_len = gst_rtp_buffer_calc_packet_len (newsize, 0, 0);
/* check buffer filled against length and max latency */
- flush = gst_basertppayload_is_filled (GST_BASE_RTP_PAYLOAD (rtptheorapay),
+ flush = gst_rtp_base_payload_is_filled (GST_RTP_BASE_PAYLOAD (rtptheorapay),
packet_len, newduration);
/* we can store up to 15 theora packets in one RTP packet. */
flush |= (rtptheorapay->payload_pkts == 15);
gst_rtp_theora_pay_init_packet (rtptheorapay, TDT, timestamp);
}
- payload = gst_rtp_buffer_get_payload (rtptheorapay->packet);
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtptheorapay->payload_pos;
fragmented = FALSE;
if (fragmented) {
/* fragmented packets are always flushed and have ptks of 0 */
rtptheorapay->payload_pkts = 0;
+ gst_rtp_buffer_unmap (&rtp);
ret = gst_rtp_theora_pay_flush_packet (rtptheorapay);
if (size > 0) {
/* start new packet and get pointers. TDT stays the same. */
gst_rtp_theora_pay_init_packet (rtptheorapay,
rtptheorapay->payload_TDT, timestamp);
- payload = gst_rtp_buffer_get_payload (rtptheorapay->packet);
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtptheorapay->payload_pos;
}
} else {
}
} while (size);
+ if (rtp.buffer)
+ gst_rtp_buffer_unmap (&rtp);
+
return ret;
}
static GstFlowReturn
-gst_rtp_theora_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_theora_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpTheoraPay *rtptheorapay;
GstFlowReturn ret;
- guint size;
+ GstMapInfo map;
+ gsize size;
guint8 *data;
GstClockTime duration, timestamp;
guint8 TDT;
rtptheorapay = GST_RTP_THEORA_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
- GST_DEBUG_OBJECT (rtptheorapay, "size %u, duration %" GST_TIME_FORMAT,
- size, GST_TIME_ARGS (duration));
+ GST_DEBUG_OBJECT (rtptheorapay, "size %" G_GSIZE_FORMAT
+ ", duration %" GST_TIME_FORMAT, size, GST_TIME_ARGS (duration));
if (G_UNLIKELY (size > 0xffff))
goto wrong_size;
if (TDT != 0) {
GST_DEBUG_OBJECT (rtptheorapay, "collecting header, buffer %p", buffer);
/* append header to the list of headers */
+ gst_buffer_unmap (buffer, &map);
rtptheorapay->headers = g_list_append (rtptheorapay->headers, buffer);
ret = GST_FLOW_OK;
goto done;
ret = gst_rtp_theora_pay_payload_buffer (rtptheorapay, TDT, data, size,
timestamp, duration, 0);
+
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
done:
wrong_size:
{
GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
- ("Invalid packet size (%d <= 0xffff)", size), (NULL));
+ ("Invalid packet size (%" G_GSIZE_FORMAT " <= 0xffff)", size), (NULL));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
parse_id_failed:
{
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
{
GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
(NULL), ("Ignoring unknown header received"));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
{
GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
(NULL), ("Error initializing header config"));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
}
static gboolean
-gst_rtp_theora_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_theora_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
{
- GstRtpTheoraPay *rtptheorapay = GST_RTP_THEORA_PAY (GST_PAD_PARENT (pad));
+ GstRtpTheoraPay *rtptheorapay = GST_RTP_THEORA_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
break;
}
/* false to let parent handle event as well */
- return FALSE;
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
}
static GstStateChangeReturn
#define __GST_RTP_THEORA_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRtpTheoraPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
/* the headers */
gboolean need_headers;
struct _GstRtpTheoraPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_theora_pay_get_type (void);
GST_STATIC_CAPS ("audio/x-vorbis")
);
-GST_BOILERPLATE (GstRtpVorbisDepay, gst_rtp_vorbis_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_vorbis_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpVorbisDepay, gst_rtp_vorbis_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_vorbis_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_vorbis_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_vorbis_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_vorbis_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static void gst_rtp_vorbis_depay_finalize (GObject * object);
static GstStateChangeReturn gst_rtp_vorbis_depay_change_state (GstElement *
element, GstStateChange transition);
-
-static void
-gst_rtp_vorbis_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_vorbis_depay_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_vorbis_depay_src_template);
-
- gst_element_class_set_details_simple (element_class, "RTP Vorbis depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts Vorbis Audio from RTP packets (RFC 5215)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
static void
gst_rtp_vorbis_depay_class_init (GstRtpVorbisDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_vorbis_depay_finalize;
gstelement_class->change_state = gst_rtp_vorbis_depay_change_state;
- gstbasertpdepayload_class->process = gst_rtp_vorbis_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_vorbis_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_vorbis_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_vorbis_depay_setcaps;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vorbis_depay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vorbis_depay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Vorbis depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Vorbis Audio from RTP packets (RFC 5215)",
+ "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpvorbisdepay_debug, "rtpvorbisdepay", 0,
"Vorbis RTP Depayloader");
}
static void
-gst_rtp_vorbis_depay_init (GstRtpVorbisDepay * rtpvorbisdepay,
- GstRtpVorbisDepayClass * klass)
+gst_rtp_vorbis_depay_init (GstRtpVorbisDepay * rtpvorbisdepay)
{
rtpvorbisdepay->adapter = gst_adapter_new ();
}
{
GstBuffer *buf;
guint32 num_headers;
+ GstMapInfo map;
guint8 *data;
- guint size;
+ gsize size;
guint offset;
gint i, j;
- data = GST_BUFFER_DATA (confbuf);
- size = GST_BUFFER_SIZE (confbuf);
+ gst_buffer_map (confbuf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
- GST_DEBUG_OBJECT (rtpvorbisdepay, "config size %u", size);
+ GST_DEBUG_OBJECT (rtpvorbisdepay, "config size %" G_GSIZE_FORMAT, size);
/* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Number of packed headers |
offset += 6;
GST_DEBUG_OBJECT (rtpvorbisdepay,
- "header %d, ident 0x%08x, length %u, left %u", i, ident, length, size);
+ "header %d, ident 0x%08x, length %u, left %" G_GSIZE_FORMAT, i, ident,
+ length, size);
/* FIXME check if we already got this ident */
GST_DEBUG_OBJECT (rtpvorbisdepay, "reading header %d, size %u", j,
h_size);
- buf = gst_buffer_create_sub (confbuf, offset, h_size);
+ buf = gst_buffer_copy_region (confbuf, GST_BUFFER_COPY_MEMORY, offset,
+ h_size);
conf->headers = g_list_append (conf->headers, buf);
offset += h_size;
size -= h_size;
}
rtpvorbisdepay->configs = g_list_append (rtpvorbisdepay->configs, conf);
}
+
+ gst_buffer_unmap (confbuf, &map);
gst_buffer_unref (confbuf);
return TRUE;
too_small:
{
GST_DEBUG_OBJECT (rtpvorbisdepay, "configuration too small");
+ gst_buffer_unmap (confbuf, &map);
gst_buffer_unref (confbuf);
return FALSE;
}
guint length)
{
GstBuffer *confbuf;
- guint8 *conf;
+ GstMapInfo map;
if (G_UNLIKELY (size < 4))
return FALSE;
/* transform inline to out-of-band and parse that one */
confbuf = gst_buffer_new_and_alloc (size + 9);
- conf = GST_BUFFER_DATA (confbuf);
+ gst_buffer_map (confbuf, &map, GST_MAP_WRITE);
/* 1 header */
- GST_WRITE_UINT32_BE (conf, 1);
+ GST_WRITE_UINT32_BE (map.data, 1);
/* write Ident */
- GST_WRITE_UINT24_BE (conf + 4, ident);
+ GST_WRITE_UINT24_BE (map.data + 4, ident);
/* write sort-of-length */
- GST_WRITE_UINT16_BE (conf + 7, length);
+ GST_WRITE_UINT16_BE (map.data + 7, length);
/* copy remainder */
- memcpy (conf + 9, configuration, size);
+ memcpy (map.data + 9, configuration, size);
+ gst_buffer_unmap (confbuf, &map);
return gst_rtp_vorbis_depay_parse_configuration (rtpvorbisdepay, confbuf);
}
static gboolean
-gst_rtp_vorbis_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_vorbis_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRtpVorbisDepay *rtpvorbisdepay;
data = g_base64_decode (configuration, &size);
confbuf = gst_buffer_new ();
- GST_BUFFER_DATA (confbuf) = data;
- GST_BUFFER_MALLOCDATA (confbuf) = data;
- GST_BUFFER_SIZE (confbuf) = size;
+ gst_buffer_take_memory (confbuf, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
if (!gst_rtp_vorbis_depay_parse_configuration (rtpvorbisdepay, confbuf))
goto invalid_configuration;
} else {
depayload->clock_rate = clock_rate;
/* set caps on pad and on header */
- srccaps = gst_caps_new_simple ("audio/x-vorbis", NULL);
+ srccaps = gst_caps_new_empty_simple ("audio/x-vorbis");
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
GstBuffer *header = GST_BUFFER_CAST (headers->data);
gst_buffer_ref (header);
- gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpvorbisdepay),
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpvorbisdepay),
header);
}
/* remember the current config */
}
static GstBuffer *
-gst_rtp_vorbis_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_vorbis_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpVorbisDepay *rtpvorbisdepay;
GstBuffer *outbuf;
GstFlowReturn ret;
gint payload_len;
guint8 *payload, *to_free = NULL;
- guint32 timestamp;
guint32 header, ident;
guint8 F, VDT, packets;
+ GstRTPBuffer rtp = { NULL };
rtpvorbisdepay = GST_RTP_VORBIS_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
GST_DEBUG_OBJECT (depayload, "got RTP packet of size %d", payload_len);
if (G_UNLIKELY (payload_len < 4))
goto packet_short;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
header = GST_READ_UINT32_BE (payload);
/*
/* first assembled packet, reuse 2 bytes to store the length */
headerskip = (F == 1 ? 4 : 6);
/* skip header and length. */
- vdata = gst_rtp_buffer_get_payload_subbuffer (buf, headerskip, -1);
+ vdata = gst_rtp_buffer_get_payload_subbuffer (&rtp, headerskip, -1);
GST_DEBUG_OBJECT (depayload, "assemble vorbis packet");
gst_adapter_push (rtpvorbisdepay->adapter, vdata);
* .. vorbis data |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+*
*/
- timestamp = gst_rtp_buffer_get_timestamp (buf);
-
while (payload_len > 2) {
guint16 length;
/* create buffer for packet */
if (G_UNLIKELY (to_free)) {
outbuf = gst_buffer_new ();
- GST_BUFFER_DATA (outbuf) = payload;
- GST_BUFFER_MALLOCDATA (outbuf) = to_free;
- GST_BUFFER_SIZE (outbuf) = length;
+ gst_buffer_take_memory (outbuf, -1,
+ gst_memory_new_wrapped (0, to_free, g_free,
+ (payload - to_free) + length, payload - to_free, length));
to_free = NULL;
} else {
outbuf = gst_buffer_new_and_alloc (length);
- memcpy (GST_BUFFER_DATA (outbuf), payload, length);
+ gst_buffer_fill (outbuf, 0, payload, length);
}
payload += length;
payload_len -= length;
- if (timestamp != -1)
- /* push with timestamp of the last packet, which is the same timestamp that
- * should apply to the first assembled packet. */
- ret = gst_base_rtp_depayload_push_ts (depayload, timestamp, outbuf);
- else
- ret = gst_base_rtp_depayload_push (depayload, outbuf);
-
+ ret = gst_rtp_base_depayload_push (depayload, outbuf);
if (ret != GST_FLOW_OK)
break;
-
- /* make sure we don't set a timestamp on next buffers */
- timestamp = -1;
}
g_free (to_free);
+ gst_rtp_buffer_unmap (&rtp);
+
return NULL;
no_output:
{
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
/* ERORRS */
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Could not switch codebooks"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
packet_short:
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Packet was too short (%d < 4)", payload_len));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
ignore_reserved:
{
GST_WARNING_OBJECT (rtpvorbisdepay, "reserved VDT ignored");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
length_short:
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Packet contains invalid data"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_configuration:
/* fatal, as we otherwise risk carrying on without output */
GST_ELEMENT_ERROR (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Packet contains invalid configuration"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpVorbisDepay
{
- GstBaseRTPDepayload parent;
+ GstRTPBaseDepayload parent;
GList *configs;
GstRtpVorbisConfig *config;
struct _GstRtpVorbisDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_vorbis_depay_get_type (void);
GST_STATIC_CAPS ("audio/x-vorbis")
);
-GST_BOILERPLATE (GstRtpVorbisPay, gst_rtp_vorbis_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
+#define gst_rtp_vorbis_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpVorbisPay, gst_rtp_vorbis_pay, GST_TYPE_RTP_BASE_PAYLOAD);
-static gboolean gst_rtp_vorbis_pay_setcaps (GstBaseRTPPayload * basepayload,
+static gboolean gst_rtp_vorbis_pay_setcaps (GstRTPBasePayload * basepayload,
GstCaps * caps);
static GstStateChangeReturn gst_rtp_vorbis_pay_change_state (GstElement *
element, GstStateChange transition);
-static GstFlowReturn gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * pad,
+static GstFlowReturn gst_rtp_vorbis_pay_handle_buffer (GstRTPBasePayload * pad,
GstBuffer * buffer);
-static gboolean gst_rtp_vorbis_pay_handle_event (GstPad * pad,
+static gboolean gst_rtp_vorbis_pay_sink_event (GstRTPBasePayload * payload,
GstEvent * event);
static void
-gst_rtp_vorbis_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_vorbis_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_vorbis_pay_sink_template);
-
- gst_element_class_set_details_simple (element_class, "RTP Vorbis depayloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encode Vorbis audio into RTP packets (RFC 5215)",
- "Wim Taymans <wimi.taymans@gmail.com>");
-}
-
-static void
gst_rtp_vorbis_pay_class_init (GstRtpVorbisPayClass * klass)
{
GstElementClass *gstelement_class;
- GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
gstelement_class = (GstElementClass *) klass;
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
gstelement_class->change_state = gst_rtp_vorbis_pay_change_state;
- gstbasertppayload_class->set_caps = gst_rtp_vorbis_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_vorbis_pay_handle_buffer;
- gstbasertppayload_class->handle_event = gst_rtp_vorbis_pay_handle_event;
+ gstrtpbasepayload_class->set_caps = gst_rtp_vorbis_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_vorbis_pay_handle_buffer;
+ gstrtpbasepayload_class->sink_event = gst_rtp_vorbis_pay_sink_event;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vorbis_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vorbis_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Vorbis depayloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encode Vorbis audio into RTP packets (RFC 5215)",
+ "Wim Taymans <wimi.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpvorbispay_debug, "rtpvorbispay", 0,
"Vorbis RTP Payloader");
}
static void
-gst_rtp_vorbis_pay_init (GstRtpVorbisPay * rtpvorbispay,
- GstRtpVorbisPayClass * klass)
+gst_rtp_vorbis_pay_init (GstRtpVorbisPay * rtpvorbispay)
{
/* needed because of GST_BOILERPLATE */
}
}
static gboolean
-gst_rtp_vorbis_pay_setcaps (GstBaseRTPPayload * basepayload, GstCaps * caps)
+gst_rtp_vorbis_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
{
GstRtpVorbisPay *rtpvorbispay;
gst_rtp_vorbis_pay_reset_packet (GstRtpVorbisPay * rtpvorbispay, guint8 VDT)
{
guint payload_len;
+ GstRTPBuffer rtp = { NULL };
GST_LOG_OBJECT (rtpvorbispay, "reset packet");
rtpvorbispay->payload_pos = 4;
- payload_len = gst_rtp_buffer_get_payload_len (rtpvorbispay->packet);
+ gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
rtpvorbispay->payload_left = payload_len - 4;
rtpvorbispay->payload_duration = 0;
rtpvorbispay->payload_F = 0;
/* new packet allocate max packet size */
rtpvorbispay->packet =
- gst_rtp_buffer_new_allocate_len (GST_BASE_RTP_PAYLOAD_MTU
+ gst_rtp_buffer_new_allocate_len (GST_RTP_BASE_PAYLOAD_MTU
(rtpvorbispay), 0, 0);
gst_rtp_vorbis_pay_reset_packet (rtpvorbispay, VDT);
GST_BUFFER_TIMESTAMP (rtpvorbispay->packet) = timestamp;
GstFlowReturn ret;
guint8 *payload;
guint hlen;
+ GstRTPBuffer rtp = { NULL };
/* check for empty packet */
if (!rtpvorbispay->packet || rtpvorbispay->payload_pos <= 4)
GST_LOG_OBJECT (rtpvorbispay, "flushing packet");
+ gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
+
/* fix header */
- payload = gst_rtp_buffer_get_payload (rtpvorbispay->packet);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/*
* 0 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
(rtpvorbispay->payload_VDT & 0x3) << 4 |
(rtpvorbispay->payload_pkts & 0xf);
+ gst_rtp_buffer_unmap (&rtp);
+
/* shrink the buffer size to the last written byte */
hlen = gst_rtp_buffer_calc_header_len (0);
- GST_BUFFER_SIZE (rtpvorbispay->packet) = hlen + rtpvorbispay->payload_pos;
+ gst_buffer_resize (rtpvorbispay->packet, 0, hlen + rtpvorbispay->payload_pos);
GST_BUFFER_DURATION (rtpvorbispay->packet) = rtpvorbispay->payload_duration;
/* push, this gives away our ref to the packet, so clear it. */
ret =
- gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpvorbispay),
+ gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpvorbispay),
rtpvorbispay->packet);
rtpvorbispay->packet = NULL;
}
static gboolean
-gst_rtp_vorbis_pay_finish_headers (GstBaseRTPPayload * basepayload)
+gst_rtp_vorbis_pay_finish_headers (GstRTPBasePayload * basepayload)
{
GstRtpVorbisPay *rtpvorbispay = GST_RTP_VORBIS_PAY (basepayload);
GList *walk;
ident = fnv1_hash_32_new ();
for (walk = rtpvorbispay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+ GstMapInfo map;
guint bsize;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
length += bsize;
n_headers++;
} while (bsize);
}
/* update hash */
- ident = fnv1_hash_32_update (ident, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ ident = fnv1_hash_32_update (ident, map.data, map.size);
+ gst_buffer_unmap (buf, &map);
}
/* packet length is header size + packet length */
if (!g_list_next (walk))
break;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
/* calc size */
size = 0;
} while (bsize);
temp = size;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
/* write the size backwards */
flag = 0;
while (size) {
for (walk = rtpvorbispay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
- memcpy (data, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
- data += GST_BUFFER_SIZE (buf);
+ gst_buffer_extract (buf, 0, data, gst_buffer_get_size (buf));
+ data += gst_buffer_get_size (buf);
}
/* serialize to base64 */
/* configure payloader settings */
cstr = g_strdup_printf ("%d", rtpvorbispay->channels);
- gst_basertppayload_set_options (basepayload, "audio", TRUE, "VORBIS",
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "VORBIS",
rtpvorbispay->rate);
res =
- gst_basertppayload_set_outcaps (basepayload, "encoding-params",
+ gst_rtp_base_payload_set_outcaps (basepayload, "encoding-params",
G_TYPE_STRING, cstr, "configuration", G_TYPE_STRING, configuration, NULL);
g_free (cstr);
g_free (configuration);
}
static gboolean
-gst_rtp_vorbis_pay_parse_id (GstBaseRTPPayload * basepayload, guint8 * data,
+gst_rtp_vorbis_pay_parse_id (GstRTPBasePayload * basepayload, guint8 * data,
guint size)
{
GstRtpVorbisPay *rtpvorbispay = GST_RTP_VORBIS_PAY (basepayload);
}
static GstFlowReturn
-gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * basepayload,
+gst_rtp_vorbis_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRtpVorbisPay *rtpvorbispay;
GstFlowReturn ret;
- guint size, newsize;
+ guint newsize;
+ GstMapInfo map;
+ gsize size;
guint8 *data;
guint packet_len;
GstClockTime duration, newduration, timestamp;
guint plen;
guint8 *ppos, *payload;
gboolean fragmented;
+ GstRTPBuffer rtp = { NULL };
rtpvorbispay = GST_RTP_VORBIS_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
- GST_LOG_OBJECT (rtpvorbispay, "size %u, duration %" GST_TIME_FORMAT,
- size, GST_TIME_ARGS (duration));
+ GST_LOG_OBJECT (rtpvorbispay, "size %" G_GSIZE_FORMAT
+ ", duration %" GST_TIME_FORMAT, size, GST_TIME_ARGS (duration));
if (G_UNLIKELY (size < 1 || size > 0xffff))
goto wrong_size;
if (VDT != 0) {
GST_DEBUG_OBJECT (rtpvorbispay, "collecting header");
/* append header to the list of headers */
+ gst_buffer_unmap (buffer, &map);
rtpvorbispay->headers = g_list_append (rtpvorbispay->headers, buffer);
ret = GST_FLOW_OK;
goto done;
packet_len = gst_rtp_buffer_calc_packet_len (newsize, 0, 0);
/* check buffer filled against length and max latency */
- flush = gst_basertppayload_is_filled (basepayload, packet_len, newduration);
+ flush = gst_rtp_base_payload_is_filled (basepayload, packet_len, newduration);
/* we can store up to 15 vorbis packets in one RTP packet. */
flush |= (rtpvorbispay->payload_pkts == 15);
/* flush if we have a new VDT */
gst_rtp_vorbis_pay_init_packet (rtpvorbispay, VDT, timestamp);
}
- payload = gst_rtp_buffer_get_payload (rtpvorbispay->packet);
+ gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtpvorbispay->payload_pos;
fragmented = FALSE;
}
}
if (fragmented) {
+ gst_rtp_buffer_unmap (&rtp);
/* fragmented packets are always flushed and have ptks of 0 */
rtpvorbispay->payload_pkts = 0;
ret = gst_rtp_vorbis_pay_flush_packet (rtpvorbispay);
/* start new packet and get pointers. VDT stays the same. */
gst_rtp_vorbis_pay_init_packet (rtpvorbispay,
rtpvorbispay->payload_VDT, timestamp);
- payload = gst_rtp_buffer_get_payload (rtpvorbispay->packet);
+ gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtpvorbispay->payload_pos;
}
} else {
rtpvorbispay->payload_duration += duration;
}
}
+
+ if (rtp.buffer)
+ gst_rtp_buffer_unmap (&rtp);
+
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
done:
wrong_size:
{
GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
- ("Invalid packet size (1 < %d <= 0xffff)", size), (NULL));
+ ("Invalid packet size (1 < %" G_GSIZE_FORMAT " <= 0xffff)", size),
+ (NULL));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
parse_id_failed:
{
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
{
GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
(NULL), ("Ignoring unknown header received"));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
{
GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
(NULL), ("Error initializing header config"));
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
}
static gboolean
-gst_rtp_vorbis_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_vorbis_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
{
- GstRtpVorbisPay *rtpvorbispay = GST_RTP_VORBIS_PAY (GST_PAD_PARENT (pad));
+ GstRtpVorbisPay *rtpvorbispay = GST_RTP_VORBIS_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
break;
}
/* false to let parent handle event as well */
- return FALSE;
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
}
static GstStateChangeReturn
#define __GST_RTP_VORBIS_PAY_H__
#include <gst/gst.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
struct _GstRtpVorbisPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
/* the headers */
gboolean need_headers;
struct _GstRtpVorbisPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_vorbis_pay_get_type (void);
#define GST_CAT_DEFAULT (rtpvrawdepay_debug)
static GstStaticPadTemplate gst_rtp_vraw_depay_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("video/x-raw-rgb; video/x-raw-yuv")
+ GST_STATIC_CAPS ("video/x-raw")
);
static GstStaticPadTemplate gst_rtp_vraw_depay_sink_template =
"clock-rate = (int) 90000, " "encoding-name = (string) \"RAW\"")
);
-GST_BOILERPLATE (GstRtpVRawDepay, gst_rtp_vraw_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_vraw_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_vraw_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload,
+static GstBuffer *gst_rtp_vraw_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static GstStateChangeReturn gst_rtp_vraw_depay_change_state (GstElement *
element, GstStateChange transition);
-static gboolean gst_rtp_vraw_depay_handle_event (GstBaseRTPDepayload * filter,
+static gboolean gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter,
GstEvent * event);
static void
-gst_rtp_vraw_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_vraw_depay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_vraw_depay_sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts raw video from RTP packets (RFC 4175)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
{
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gstelement_class->change_state = gst_rtp_vraw_depay_change_state;
- gstbasertpdepayload_class->set_caps = gst_rtp_vraw_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_vraw_depay_process;
- gstbasertpdepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_vraw_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_vraw_depay_process;
+ gstrtpbasedepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vraw_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vraw_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts raw video from RTP packets (RFC 4175)",
+ "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpvrawdepay_debug, "rtpvrawdepay", 0,
"raw video RTP Depayloader");
}
static void
-gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay,
- GstRtpVRawDepayClass * klass)
+gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay)
{
/* needed because of GST_BOILERPLATE */
}
rtpvrawdepay->outbuf = NULL;
}
rtpvrawdepay->timestamp = -1;
+ if (rtpvrawdepay->pool) {
+ gst_buffer_pool_set_active (rtpvrawdepay->pool, FALSE);
+ gst_object_unref (rtpvrawdepay->pool);
+ rtpvrawdepay->pool = NULL;
+ }
+}
+
+static GstFlowReturn
+gst_rtp_vraw_depay_negotiate_pool (GstRtpVRawDepay * depay, GstCaps * caps,
+ GstVideoInfo * info)
+{
+ GstQuery *query;
+ GstBufferPool *pool = NULL;
+ guint size, min, max, prefix, alignment;
+ GstStructure *config;
+
+ /* find a pool for the negotiated caps now */
+ query = gst_query_new_allocation (caps, TRUE);
+
+ if (gst_pad_peer_query (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), query)) {
+ GST_DEBUG_OBJECT (depay, "got downstream ALLOCATION hints");
+ /* we got configuration from our peer, parse them */
+ gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
+ &alignment, &pool);
+ } else {
+ GST_DEBUG_OBJECT (depay, "didn't get downstream ALLOCATION hints");
+ size = info->size;
+ min = max = 0;
+ prefix = 0;
+ alignment = 0;
+ }
+
+ if (pool == NULL) {
+ /* we did not get a pool, make one ourselves then */
+ pool = gst_buffer_pool_new ();
+ }
+
+ if (depay->pool)
+ gst_object_unref (depay->pool);
+ depay->pool = pool;
+
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set (config, caps, size, min, max, prefix, alignment);
+ /* just set the metadata, if the pool can support it we will transparently use
+ * it through the video info API. We could also see if the pool support this
+ * metadata and only activate it then. */
+ gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+
+ gst_buffer_pool_set_config (pool, config);
+ /* and activate */
+ gst_buffer_pool_set_active (pool, TRUE);
+
+ gst_query_unref (query);
+
+ return GST_FLOW_OK;
}
static gboolean
-gst_rtp_vraw_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRtpVRawDepay *rtpvrawdepay;
gint clock_rate;
- const gchar *str, *type;
+ const gchar *str;
gint format, width, height, pgroup, xinc, yinc;
- guint ystride, uvstride, yp, up, vp, outsize;
GstCaps *srccaps;
- guint32 fourcc = 0;
gboolean res;
- gint rmask = 0, gmask = 0, bmask = 0, amask = 0, bpp = 0, depth = 0;
+ GstFlowReturn ret;
rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
structure = gst_caps_get_structure (caps, 0);
- yp = up = vp = uvstride = 0;
xinc = yinc = 1;
if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
if (!strcmp (str, "RGB")) {
format = GST_VIDEO_FORMAT_RGB;
pgroup = 3;
- ystride = GST_ROUND_UP_4 (width * 3);
- outsize = ystride * height;
- type = "video/x-raw-rgb";
- rmask = 0x00ff0000;
- gmask = 0x0000ff00;
- bmask = 0x000000ff;
- depth = 24;
- bpp = 24;
} else if (!strcmp (str, "RGBA")) {
format = GST_VIDEO_FORMAT_RGBA;
pgroup = 4;
- ystride = width * 4;
- outsize = ystride * height;
- type = "video/x-raw-rgb";
- rmask = 0xff000000;
- gmask = 0x00ff0000;
- bmask = 0x0000ff00;
- amask = 0x000000ff;
- depth = 32;
- bpp = 32;
} else if (!strcmp (str, "BGR")) {
format = GST_VIDEO_FORMAT_BGR;
pgroup = 3;
- ystride = GST_ROUND_UP_4 (width * 3);
- outsize = ystride * height;
- type = "video/x-raw-rgb";
- rmask = 0x000000ff;
- gmask = 0x0000ff00;
- bmask = 0x00ff0000;
- depth = 24;
- bpp = 24;
} else if (!strcmp (str, "BGRA")) {
format = GST_VIDEO_FORMAT_BGRA;
pgroup = 4;
- ystride = width * 4;
- outsize = ystride * height;
- type = "video/x-raw-rgb";
- rmask = 0x0000ff00;
- gmask = 0x00ff0000;
- bmask = 0xff000000;
- amask = 0x000000ff;
- depth = 32;
- bpp = 32;
} else if (!strcmp (str, "YCbCr-4:4:4")) {
format = GST_VIDEO_FORMAT_AYUV;
pgroup = 3;
- ystride = width * 4;
- outsize = ystride * height;
- type = "video/x-raw-yuv";
- fourcc = GST_MAKE_FOURCC ('A', 'Y', 'U', 'V');
} else if (!strcmp (str, "YCbCr-4:2:2")) {
format = GST_VIDEO_FORMAT_UYVY;
pgroup = 4;
- ystride = GST_ROUND_UP_2 (width) * 2;
- outsize = ystride * height;
- type = "video/x-raw-yuv";
- fourcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
xinc = 2;
} else if (!strcmp (str, "YCbCr-4:2:0")) {
format = GST_VIDEO_FORMAT_I420;
pgroup = 6;
- ystride = GST_ROUND_UP_4 (width);
- uvstride = GST_ROUND_UP_8 (width) / 2;
- up = ystride * GST_ROUND_UP_2 (height);
- vp = up + uvstride * GST_ROUND_UP_2 (height) / 2;
- outsize = vp + uvstride * GST_ROUND_UP_2 (height) / 2;
- type = "video/x-raw-yuv";
- fourcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
xinc = yinc = 2;
} else if (!strcmp (str, "YCbCr-4:1:1")) {
format = GST_VIDEO_FORMAT_Y41B;
pgroup = 6;
- ystride = GST_ROUND_UP_4 (width);
- uvstride = GST_ROUND_UP_8 (width) / 4;
- up = ystride * height;
- vp = up + uvstride * height;
- outsize = vp + uvstride * height;
- type = "video/x-raw-yuv";
- fourcc = GST_MAKE_FOURCC ('Y', '4', '1', 'B');
xinc = 4;
} else
goto unknown_format;
- rtpvrawdepay->width = width;
- rtpvrawdepay->height = height;
- rtpvrawdepay->format = format;
- rtpvrawdepay->yp = yp;
- rtpvrawdepay->up = up;
- rtpvrawdepay->vp = vp;
+ gst_video_info_init (&rtpvrawdepay->vinfo);
+ gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
+ GST_VIDEO_INFO_FPS_N (&rtpvrawdepay->vinfo) = 0;
+ GST_VIDEO_INFO_FPS_D (&rtpvrawdepay->vinfo) = 1;
+
rtpvrawdepay->pgroup = pgroup;
rtpvrawdepay->xinc = xinc;
rtpvrawdepay->yinc = yinc;
- rtpvrawdepay->ystride = ystride;
- rtpvrawdepay->uvstride = uvstride;
- rtpvrawdepay->outsize = outsize;
-
- srccaps = gst_caps_new_simple (type,
- "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height,
- "format", GST_TYPE_FOURCC, fourcc,
- "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
-
- if (!strcmp (type, "video/x-raw-rgb")) {
- gst_caps_set_simple (srccaps,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "red_mask", G_TYPE_INT, rmask,
- "green_mask", G_TYPE_INT, gmask,
- "blue_mask", G_TYPE_INT, bmask,
- "bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, depth, NULL);
-
- if (amask > 0) {
- gst_caps_set_simple (srccaps, "alpha_mask", G_TYPE_INT, amask, NULL);
- }
- }
- res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
format);
- GST_DEBUG_OBJECT (depayload, "yp %d, up %d, vp %d", yp, up, vp);
- GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d", xinc, yinc);
- GST_DEBUG_OBJECT (depayload, "pgroup %d, ystride %d, uvstride %d", pgroup,
- ystride, uvstride);
- GST_DEBUG_OBJECT (depayload, "outsize %u", outsize);
+ GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d, pgroup %d",
+ xinc, yinc, pgroup);
+
+ /* negotiate a bufferpool */
+ if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
+ &rtpvrawdepay->vinfo)) != GST_FLOW_OK)
+ goto no_bufferpool;
return res;
GST_ERROR_OBJECT (depayload, "unknown sampling format '%s'", str);
return FALSE;
}
+no_bufferpool:
+ {
+ GST_DEBUG_OBJECT (depayload, "no bufferpool");
+ return FALSE;
+ }
}
static GstBuffer *
-gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_vraw_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstRtpVRawDepay *rtpvrawdepay;
- guint8 *payload, *data, *yp, *up, *vp, *headers;
+ guint8 *payload, *yp, *up, *vp, *headers;
guint32 timestamp;
guint cont, ystride, uvstride, pgroup, payload_len;
gint width, height, xinc, yinc;
+ GstRTPBuffer rtp = { NULL };
+ GstVideoFrame frame;
rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
- timestamp = gst_rtp_buffer_get_timestamp (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ timestamp = gst_rtp_buffer_get_timestamp (&rtp);
if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
GstBuffer *outbuf;
GST_LOG_OBJECT (depayload, "new frame with timestamp %u", timestamp);
/* new timestamp, flush old buffer and create new output buffer */
if (rtpvrawdepay->outbuf) {
- gst_base_rtp_depayload_push_ts (depayload, rtpvrawdepay->timestamp,
- rtpvrawdepay->outbuf);
+ gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
rtpvrawdepay->outbuf = NULL;
}
- ret = gst_pad_alloc_buffer (depayload->srcpad, -1, rtpvrawdepay->outsize,
- GST_PAD_CAPS (depayload->srcpad), &outbuf);
- if (ret != GST_FLOW_OK)
+ if (gst_pad_check_reconfigure (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload))) {
+ GstCaps *caps;
+
+ caps =
+ gst_pad_get_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
+ gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
+ &rtpvrawdepay->vinfo);
+ gst_caps_unref (caps);
+ }
+
+ ret = gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &outbuf, NULL);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
goto alloc_failed;
/* clear timestamp from alloc... */
rtpvrawdepay->timestamp = timestamp;
}
- data = GST_BUFFER_DATA (rtpvrawdepay->outbuf);
+ if (!gst_video_frame_map (&frame, &rtpvrawdepay->vinfo, rtpvrawdepay->outbuf,
+ GST_MAP_WRITE))
+ goto invalid_frame;
/* get pointer and strides of the planes */
- yp = data + rtpvrawdepay->yp;
- up = data + rtpvrawdepay->up;
- vp = data + rtpvrawdepay->vp;
+ yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
+ up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
+ vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
+
+ ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
+ uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
- ystride = rtpvrawdepay->ystride;
- uvstride = rtpvrawdepay->uvstride;
pgroup = rtpvrawdepay->pgroup;
- width = rtpvrawdepay->width;
- height = rtpvrawdepay->height;
+ width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
+ height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
xinc = rtpvrawdepay->xinc;
yinc = rtpvrawdepay->yinc;
- payload = gst_rtp_buffer_get_payload (buf);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 3)
goto short_packet;
"writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
line, offs, payload_len);
- switch (rtpvrawdepay->format) {
+ switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
case GST_VIDEO_FORMAT_RGB:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGR:
payload_len -= length;
}
- if (gst_rtp_buffer_get_marker (buf)) {
+ gst_video_frame_unmap (&frame);
+ gst_rtp_buffer_unmap (&rtp);
+
+ if (gst_rtp_buffer_get_marker (&rtp)) {
GST_LOG_OBJECT (depayload, "marker, flushing frame");
if (rtpvrawdepay->outbuf) {
- gst_base_rtp_depayload_push_ts (depayload, timestamp,
- rtpvrawdepay->outbuf);
+ gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
rtpvrawdepay->outbuf = NULL;
}
rtpvrawdepay->timestamp = -1;
{
GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
(NULL), ("unimplemented sampling"));
+ gst_video_frame_unmap (&frame);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
alloc_failed:
{
GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
+ gst_rtp_buffer_unmap (&rtp);
+ return NULL;
+ }
+invalid_frame:
+ {
+ GST_ERROR_OBJECT (depayload, "could not map video frame");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
wrong_length:
{
GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
+ gst_video_frame_unmap (&frame);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
short_packet:
{
GST_WARNING_OBJECT (depayload, "short packet");
+ gst_video_frame_unmap (&frame);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static gboolean
-gst_rtp_vraw_depay_handle_event (GstBaseRTPDepayload * filter, GstEvent * event)
+gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter, GstEvent * event)
{
gboolean ret;
GstRtpVRawDepay *rtpvrawdepay;
}
ret =
- GST_BASE_RTP_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
+ GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
return ret;
}
#include <gst/gst.h>
#include <gst/video/video.h>
-#include <gst/rtp/gstbasertpdepayload.h>
+#include <gst/video/gstvideometa.h>
+#include <gst/video/gstvideopool.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS
struct _GstRtpVRawDepay
{
- GstBaseRTPDepayload payload;
+ GstRTPBaseDepayload payload;
- gint width, height;
- GstVideoFormat format;
+ GstBufferPool *pool;
+ GstVideoInfo vinfo;
GstBuffer *outbuf;
guint32 timestamp;
gint pgroup;
gint xinc, yinc;
- guint yp, up, vp;
- gint ystride;
- gint uvstride;
};
struct _GstRtpVRawDepayClass
{
- GstBaseRTPDepayloadClass parent_class;
+ GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_vraw_depay_get_type (void);
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("video/x-raw-rgb, "
- "bpp = (int) 24, "
- "depth = (int) 24, "
- "endianness = (int) BIG_ENDIAN, "
- "red_mask = (int) 0x00FF0000, "
- "green_mask = (int) 0x0000FF00, "
- "blue_mask = (int) 0x000000FF, "
- "width = (int) [ 1, 32767 ], "
- "height = (int) [ 1, 32767 ]; "
- "video/x-raw-rgb, "
- "bpp = (int) 32, "
- "depth = (int) 32, "
- "endianness = (int) BIG_ENDIAN, "
- "red_mask = (int) 0xFF000000, "
- "green_mask = (int) 0x00FF0000, "
- "blue_mask = (int) 0x0000FF00, "
- "alpha_mask = (int) 0x000000FF, "
- "width = (int) [ 1, 32767 ], "
- "height = (int) [ 1, 32767 ]; "
- "video/x-raw-rgb, "
- "bpp = (int) 24, "
- "depth = (int) 24, "
- "endianness = (int) BIG_ENDIAN, "
- "red_mask = (int) 0x000000FF, "
- "green_mask = (int) 0x0000FF00, "
- "blue_mask = (int) 0x00FF0000, "
- "width = (int) [ 1, 32767 ], "
- "height = (int) [ 1, 32767 ]; "
- "video/x-raw-rgb, "
- "bpp = (int) 32, "
- "depth = (int) 32, "
- "endianness = (int) BIG_ENDIAN, "
- "red_mask = (int) 0x0000FF00, "
- "green_mask = (int) 0x00FF0000, "
- "blue_mask = (int) 0xFF000000, "
- "alpha_mask = (int) 0x000000FF, "
- "width = (int) [ 1, 32767 ], "
- "height = (int) [ 1, 32767 ]; "
- "video/x-raw-yuv, "
- "format = (fourcc) { AYUV, UYVY, I420, Y41B, UYVP }, "
+ GST_STATIC_CAPS ("video/x-raw, "
+ "format = (string) { RGB, RGBA, BGR, BGRA, AYUYV, UYVY, I420, Y41B, UYVP, I420, Y42B, Y444 }, "
"width = (int) [ 1, 32767 ], " "height = (int) [ 1, 32767 ]; ")
);
)
);
-static gboolean gst_rtp_vraw_pay_setcaps (GstBaseRTPPayload * payload,
+static gboolean gst_rtp_vraw_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
-static GstFlowReturn gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload *
+static GstFlowReturn gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpVRawPay, gst_rtp_vraw_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
+G_DEFINE_TYPE (GstRtpVRawPay, gst_rtp_vraw_pay, GST_TYPE_RTP_BASE_PAYLOAD)
- static void gst_rtp_vraw_pay_base_init (gpointer klass)
+ static void gst_rtp_vraw_pay_class_init (GstRtpVRawPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+ GstElementClass *gstelement_class;
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_vraw_pay_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_vraw_pay_sink_template);
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
- gst_element_class_set_details_simple (element_class,
+ gstrtpbasepayload_class->set_caps = gst_rtp_vraw_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_vraw_pay_handle_buffer;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vraw_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vraw_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
"RTP Raw Video payloader", "Codec/Payloader/Network/RTP",
"Payload raw video as RTP packets (RFC 4175)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_vraw_pay_class_init (GstRtpVRawPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
-
- gstbasertppayload_class->set_caps = gst_rtp_vraw_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_vraw_pay_handle_buffer;
GST_DEBUG_CATEGORY_INIT (rtpvrawpay_debug, "rtpvrawpay", 0,
"Raw video RTP Payloader");
}
static void
-gst_rtp_vraw_pay_init (GstRtpVRawPay * rtpvrawpay, GstRtpVRawPayClass * klass)
+gst_rtp_vraw_pay_init (GstRtpVRawPay * rtpvrawpay)
{
}
static gboolean
-gst_rtp_vraw_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
+gst_rtp_vraw_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
GstRtpVRawPay *rtpvrawpay;
- GstStructure *s;
gboolean res;
- const gchar *name;
- gint width, height;
- gint yp, up, vp;
- gint pgroup, ystride, uvstride = 0, xinc, yinc;
- GstVideoFormat sampling;
+ gint pgroup, xinc, yinc;
const gchar *depthstr, *samplingstr, *colorimetrystr;
gchar *wstr, *hstr;
- gboolean interlaced;
- const gchar *color_matrix;
gint depth;
+ GstVideoInfo info;
rtpvrawpay = GST_RTP_VRAW_PAY (payload);
- s = gst_caps_get_structure (caps, 0);
+ if (!gst_video_info_from_caps (&info, caps))
+ goto invalid_caps;
- /* start parsing the format */
- name = gst_structure_get_name (s);
-
- /* these values are the only thing we can do */
- depthstr = "8";
+ rtpvrawpay->vinfo = info;
- /* parse common width/height */
- res = gst_structure_get_int (s, "width", &width);
- res &= gst_structure_get_int (s, "height", &height);
- if (!res)
- goto missing_dimension;
-
- if (!gst_structure_get_boolean (s, "interlaced", &interlaced))
- interlaced = FALSE;
-
- color_matrix = gst_structure_get_string (s, "color-matrix");
- colorimetrystr = "SMPTE240M";
- if (color_matrix) {
- if (g_str_equal (color_matrix, "sdtv")) {
- /* BT.601 implies a bit more than just color-matrix */
- colorimetrystr = "BT601-5";
- } else if (g_str_equal (color_matrix, "hdtv")) {
- colorimetrystr = "BT709-2";
- }
+ if (gst_video_colorimetry_matches (&info.colorimetry,
+ GST_VIDEO_COLORIMETRY_BT601)) {
+ colorimetrystr = "BT601-5";
+ } else if (gst_video_colorimetry_matches (&info.colorimetry,
+ GST_VIDEO_COLORIMETRY_BT709)) {
+ colorimetrystr = "BT709-2";
+ } else if (gst_video_colorimetry_matches (&info.colorimetry,
+ GST_VIDEO_COLORIMETRY_SMPTE240M)) {
+ colorimetrystr = "SMPTE240M";
+ } else {
+ colorimetrystr = "SMPTE240M";
}
- yp = up = vp = 0;
xinc = yinc = 1;
- if (!strcmp (name, "video/x-raw-rgb")) {
- gint amask, rmask;
- gboolean has_alpha;
-
- has_alpha = gst_structure_get_int (s, "alpha_mask", &amask);
- depth = 8;
-
- if (!gst_structure_get_int (s, "red_mask", &rmask))
- goto unknown_mask;
+ /* these values are the only thing we can do */
+ depthstr = "8";
+ depth = 8;
- if (has_alpha) {
+ switch (GST_VIDEO_INFO_FORMAT (&info)) {
+ case GST_VIDEO_FORMAT_RGBA:
+ samplingstr = "RGBA";
pgroup = 4;
- ystride = width * 4;
- if (rmask == 0xFF000000) {
- sampling = GST_VIDEO_FORMAT_RGBA;
- samplingstr = "RGBA";
- } else {
- sampling = GST_VIDEO_FORMAT_BGRA;
- samplingstr = "BGRA";
- }
- } else {
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ samplingstr = "BGRA";
+ pgroup = 4;
+ break;
+ case GST_VIDEO_FORMAT_RGB:
+ samplingstr = "RGB";
pgroup = 3;
- ystride = GST_ROUND_UP_4 (width * 3);
- if (rmask == 0x00FF0000) {
- sampling = GST_VIDEO_FORMAT_RGB;
- samplingstr = "RGB";
- } else {
- sampling = GST_VIDEO_FORMAT_BGR;
- samplingstr = "BGR";
- }
- }
- } else if (!strcmp (name, "video/x-raw-yuv")) {
- guint32 fourcc;
-
- if (!gst_structure_get_fourcc (s, "format", &fourcc))
- goto unknown_fourcc;
-
- GST_LOG_OBJECT (payload, "have fourcc %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (fourcc));
-
- switch (fourcc) {
- case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
- sampling = GST_VIDEO_FORMAT_AYUV;
- samplingstr = "YCbCr-4:4:4";
- pgroup = 3;
- ystride = width * 4;
- depth = 8;
- break;
- case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
- sampling = GST_VIDEO_FORMAT_UYVY;
- samplingstr = "YCbCr-4:2:2";
- pgroup = 4;
- xinc = 2;
- ystride = GST_ROUND_UP_2 (width) * 2;
- depth = 8;
- break;
- case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
- sampling = GST_VIDEO_FORMAT_Y41B;
- samplingstr = "YCbCr-4:1:1";
- pgroup = 6;
- xinc = 4;
- ystride = GST_ROUND_UP_4 (width);
- uvstride = GST_ROUND_UP_8 (width) / 4;
- up = ystride * height;
- vp = up + uvstride * height;
- depth = 8;
- break;
- case GST_MAKE_FOURCC ('I', '4', '2', '0'):
- sampling = GST_VIDEO_FORMAT_I420;
- samplingstr = "YCbCr-4:2:0";
- pgroup = 6;
- xinc = yinc = 2;
- ystride = GST_ROUND_UP_4 (width);
- uvstride = GST_ROUND_UP_8 (width) / 2;
- up = ystride * GST_ROUND_UP_2 (height);
- vp = up + uvstride * GST_ROUND_UP_2 (height) / 2;
- depth = 8;
- break;
- case GST_MAKE_FOURCC ('U', 'Y', 'V', 'P'):
-#define GST_VIDEO_FORMAT_UYVP GST_VIDEO_FORMAT_UYVY /* FIXME */
- sampling = GST_VIDEO_FORMAT_UYVP;
- samplingstr = "YCbCr-4:2:2";
- pgroup = 4;
- xinc = 2;
- ystride = GST_ROUND_UP_2 (width) * 2;
- depth = 10;
- break;
- default:
- goto unknown_fourcc;
- }
- } else
- goto unknown_format;
+ case GST_VIDEO_FORMAT_BGR:
+ samplingstr = "BGR";
+ pgroup = 3;
+ break;
+ case GST_VIDEO_FORMAT_AYUV:
+ samplingstr = "YCbCr-4:4:4";
+ pgroup = 3;
+ break;
+ case GST_VIDEO_FORMAT_UYVY:
+ samplingstr = "YCbCr-4:2:2";
+ pgroup = 4;
+ xinc = 2;
+ break;
+ case GST_VIDEO_FORMAT_Y41B:
+ samplingstr = "YCbCr-4:1:1";
+ pgroup = 6;
+ xinc = 4;
+ break;
+ case GST_VIDEO_FORMAT_I420:
+ samplingstr = "YCbCr-4:2:0";
+ pgroup = 6;
+ xinc = yinc = 2;
+ break;
+ case GST_VIDEO_FORMAT_UYVP:
+ samplingstr = "YCbCr-4:2:2";
+ pgroup = 4;
+ xinc = 2;
+ depth = 10;
+ depthstr = "10";
+ break;
+ default:
+ goto unknown_format;
+ break;
+ }
- if (interlaced) {
+ if (info.flags & GST_VIDEO_FLAG_INTERLACED) {
yinc *= 2;
}
- if (depth == 10) {
- depthstr = "10";
- }
- rtpvrawpay->width = width;
- rtpvrawpay->height = height;
- rtpvrawpay->sampling = sampling;
rtpvrawpay->pgroup = pgroup;
rtpvrawpay->xinc = xinc;
rtpvrawpay->yinc = yinc;
- rtpvrawpay->yp = yp;
- rtpvrawpay->up = up;
- rtpvrawpay->vp = vp;
- rtpvrawpay->ystride = ystride;
- rtpvrawpay->uvstride = uvstride;
- rtpvrawpay->interlaced = interlaced;
rtpvrawpay->depth = depth;
- GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %d", width, height,
- sampling);
- GST_DEBUG_OBJECT (payload, "yp %d, up %d, vp %d", yp, up, vp);
- GST_DEBUG_OBJECT (payload, "pgroup %d, ystride %d, uvstride %d", pgroup,
- ystride, uvstride);
+ GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %s",
+ GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info), samplingstr);
+ GST_DEBUG_OBJECT (payload, "xinc %d, yinc %d, pgroup %d", xinc, yinc, pgroup);
- wstr = g_strdup_printf ("%d", rtpvrawpay->width);
- hstr = g_strdup_printf ("%d", rtpvrawpay->height);
+ wstr = g_strdup_printf ("%d", GST_VIDEO_INFO_WIDTH (&info));
+ hstr = g_strdup_printf ("%d", GST_VIDEO_INFO_HEIGHT (&info));
- gst_basertppayload_set_options (payload, "video", TRUE, "RAW", 90000);
- if (interlaced) {
- res = gst_basertppayload_set_outcaps (payload, "sampling", G_TYPE_STRING,
+ gst_rtp_base_payload_set_options (payload, "video", TRUE, "RAW", 90000);
+ if (info.flags & GST_VIDEO_FLAG_INTERLACED) {
+ res = gst_rtp_base_payload_set_outcaps (payload, "sampling", G_TYPE_STRING,
samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
colorimetrystr, "interlace", G_TYPE_STRING, "true", NULL);
} else {
- res = gst_basertppayload_set_outcaps (payload, "sampling", G_TYPE_STRING,
+ res = gst_rtp_base_payload_set_outcaps (payload, "sampling", G_TYPE_STRING,
samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
colorimetrystr, NULL);
return res;
/* ERRORS */
-unknown_mask:
+invalid_caps:
{
- GST_ERROR_OBJECT (payload, "unknown red mask specified");
+ GST_ERROR_OBJECT (payload, "could not parse caps");
return FALSE;
}
unknown_format:
GST_ERROR_OBJECT (payload, "unknown caps format");
return FALSE;
}
-unknown_fourcc:
- {
- GST_ERROR_OBJECT (payload, "invalid or missing fourcc");
- return FALSE;
- }
-missing_dimension:
- {
- GST_ERROR_OBJECT (payload, "missing width or height property");
- return FALSE;
- }
}
static GstFlowReturn
-gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buffer)
+gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
GstRtpVRawPay *rtpvrawpay;
GstFlowReturn ret = GST_FLOW_OK;
guint line, offset;
- guint8 *data, *yp, *up, *vp;
+ guint8 *yp, *up, *vp;
guint ystride, uvstride;
- guint size, pgroup;
+ guint pgroup;
guint mtu;
guint width, height;
gint field;
+ GstVideoFrame frame;
+ gint interlaced;
+ GstRTPBuffer rtp = { NULL, };
rtpvrawpay = GST_RTP_VRAW_PAY (payload);
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ);
- GST_LOG_OBJECT (rtpvrawpay, "new frame of %u bytes", size);
+ GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
+ gst_buffer_get_size (buffer));
/* get pointer and strides of the planes */
- yp = data + rtpvrawpay->yp;
- up = data + rtpvrawpay->up;
- vp = data + rtpvrawpay->vp;
+ yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
+ up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
+ vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
- ystride = rtpvrawpay->ystride;
- uvstride = rtpvrawpay->uvstride;
+ ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
+ uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
- mtu = GST_BASE_RTP_PAYLOAD_MTU (payload);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);
/* amount of bytes for one pixel */
pgroup = rtpvrawpay->pgroup;
- width = rtpvrawpay->width;
- height = rtpvrawpay->height;
+ width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
+ height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);
- /* start with line 0, offset 0 */
+ interlaced = ! !(rtpvrawpay->vinfo.flags & GST_VIDEO_FLAG_INTERLACED);
- for (field = 0; field < 1 + rtpvrawpay->interlaced; field++) {
+ /* start with line 0, offset 0 */
+ for (field = 0; field < 1 + interlaced; field++) {
line = field;
offset = 0;
GST_BUFFER_DURATION (buffer) / 2;
}
- outdata = gst_rtp_buffer_get_payload (out);
+ gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
+ outdata = gst_rtp_buffer_get_payload (&rtp);
GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
mtu);
"writing length %u, line %u, offset %u, cont %d", length, lin, offs,
cont);
- switch (rtpvrawpay->sampling) {
+ switch (GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo)) {
case GST_VIDEO_FORMAT_RGB:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGR:
break;
}
default:
+ gst_rtp_buffer_unmap (&rtp);
gst_buffer_unref (out);
goto unknown_sampling;
}
if (line >= height) {
GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
- gst_rtp_buffer_set_marker (out, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
}
+ gst_rtp_buffer_unmap (&rtp);
if (left > 0) {
GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
- GST_BUFFER_SIZE (out) -= left;
+ gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
}
/* push buffer */
- ret = gst_basertppayload_push (payload, out);
+ ret = gst_rtp_base_payload_push (payload, out);
}
}
+
+ gst_video_frame_unmap (&frame);
gst_buffer_unref (buffer);
return ret;
{
GST_ELEMENT_ERROR (payload, STREAM, FORMAT,
(NULL), ("unimplemented sampling"));
+ gst_video_frame_unmap (&frame);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_SUPPORTED;
}
#include <gst/gst.h>
#include <gst/video/video.h>
-#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbasepayload.h>
G_BEGIN_DECLS
struct _GstRtpVRawPay
{
- GstBaseRTPPayload payload;
+ GstRTPBasePayload payload;
- gint width, height;
- GstVideoFormat sampling;
+ GstVideoInfo vinfo;
gint pgroup;
gint xinc, yinc;
- guint yp, up, vp;
- gint ystride;
- gint uvstride;
- gboolean interlaced;
+// guint yp, up, vp;
+// gint ystride;
+// gint uvstride;
+// gboolean interlaced;
gint depth;
};
struct _GstRtpVRawPayClass
{
- GstBaseRTPPayloadClass parent_class;
+ GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_vraw_pay_get_type (void);
plugin_LTLIBRARIES = libgstrtpmanager.la
+# FIXME 0.11: ignore GValueArray warnings for now until this is sorted
+ERROR_CFLAGS=
+
glib_enum_define = GST_RTP_BIN
glib_gen_prefix = gst_rtp_bin
glib_gen_basename = gstrtpbin
gstrtpsession.h
libgstrtpmanager_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) \
- $(WARNING_CFLAGS) $(ERROR_CFLAGS)
+ $(GST_NET_CFLAGS) $(WARNING_CFLAGS) $(ERROR_CFLAGS)
libgstrtpmanager_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
- -lgstnetbuffer-@GST_MAJORMINOR@ -lgstrtp-@GST_MAJORMINOR@ \
+ $(GST_NET_LIBS) -lgstrtp-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) $(GST_LIBS_LIBS)
libgstrtpmanager_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstrtpmanager_la_LIBTOOLFLAGS = --tag=disable-static
VOID:OBJECT,OBJECT
UINT64:BOOL,UINT64
VOID:UINT64
+BOOL:BOXED,BOOL
+VOID:UINT,UINT,UINT,UINT,BOXED
* #GstRtpBin is configured with a number of request pads that define the
* functionality that is activated, similar to the #GstRtpSession element.
*
- * To use #GstRtpBin as an RTP receiver, request a recv_rtp_sink_\%d pad. The session
+ * To use #GstRtpBin as an RTP receiver, request a recv_rtp_sink_\%u pad. The session
* number must be specified in the pad name.
- * Data received on the recv_rtp_sink_\%d pad will be processed in the #GstRtpSession
+ * Data received on the recv_rtp_sink_\%u pad will be processed in the #GstRtpSession
* manager and after being validated forwarded on #GstRtpSsrcDemux element. Each
* RTP stream is demuxed based on the SSRC and send to a #GstRtpJitterBuffer. After
* the packets are released from the jitterbuffer, they will be forwarded to a
* #GstRtpPtDemux element. The #GstRtpPtDemux element will demux the packets based
- * on the payload type and will create a unique pad recv_rtp_src_\%d_\%d_\%d on
+ * on the payload type and will create a unique pad recv_rtp_src_\%u_\%u_\%u on
* gstrtpbin with the session number, SSRC and payload type respectively as the pad
* name.
*
- * To also use #GstRtpBin as an RTCP receiver, request a recv_rtcp_sink_\%d pad. The
+ * To also use #GstRtpBin as an RTCP receiver, request a recv_rtcp_sink_\%u pad. The
* session number must be specified in the pad name.
*
* If you want the session manager to generate and send RTCP packets, request
- * the send_rtcp_src_\%d pad with the session number in the pad name. Packet pushed
+ * the send_rtcp_src_\%u pad with the session number in the pad name. Packet pushed
* on this pad contain SR/RR RTCP reports that should be sent to all participants
* in the session.
*
- * To use #GstRtpBin as a sender, request a send_rtp_sink_\%d pad, which will
- * automatically create a send_rtp_src_\%d pad. If the session number is not provided,
+ * To use #GstRtpBin as a sender, request a send_rtp_sink_\%u pad, which will
+ * automatically create a send_rtp_src_\%u pad. If the session number is not provided,
* the pad from the lowest available session will be returned. The session manager will modify the
* SSRC in the RTP packets to its own SSRC and wil forward the packets on the
- * send_rtp_src_\%d pad after updating its internal state.
+ * send_rtp_src_\%u pad after updating its internal state.
*
* The session manager needs the clock-rate of the payload types it is handling
* and will signal the #GstRtpSession::request-pt-map signal when it needs such a
/* sink pads */
static GstStaticPadTemplate rtpbin_recv_rtp_sink_template =
-GST_STATIC_PAD_TEMPLATE ("recv_rtp_sink_%d",
+GST_STATIC_PAD_TEMPLATE ("recv_rtp_sink_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("application/x-rtp")
);
static GstStaticPadTemplate rtpbin_recv_rtcp_sink_template =
-GST_STATIC_PAD_TEMPLATE ("recv_rtcp_sink_%d",
+GST_STATIC_PAD_TEMPLATE ("recv_rtcp_sink_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("application/x-rtcp")
);
static GstStaticPadTemplate rtpbin_send_rtp_sink_template =
-GST_STATIC_PAD_TEMPLATE ("send_rtp_sink_%d",
+GST_STATIC_PAD_TEMPLATE ("send_rtp_sink_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("application/x-rtp")
/* src pads */
static GstStaticPadTemplate rtpbin_recv_rtp_src_template =
-GST_STATIC_PAD_TEMPLATE ("recv_rtp_src_%d_%d_%d",
+GST_STATIC_PAD_TEMPLATE ("recv_rtp_src_%u_%u_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("application/x-rtp")
);
static GstStaticPadTemplate rtpbin_send_rtcp_src_template =
-GST_STATIC_PAD_TEMPLATE ("send_rtcp_src_%d",
+GST_STATIC_PAD_TEMPLATE ("send_rtcp_src_%u",
GST_PAD_SRC,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("application/x-rtcp")
);
static GstStaticPadTemplate rtpbin_send_rtp_src_template =
-GST_STATIC_PAD_TEMPLATE ("send_rtp_src_%d",
+GST_STATIC_PAD_TEMPLATE ("send_rtp_src_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("application/x-rtp")
#define GST_RTP_BIN_GET_PRIVATE(obj) \
(G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_RTP_BIN, GstRtpBinPrivate))
-#define GST_RTP_BIN_LOCK(bin) g_mutex_lock ((bin)->priv->bin_lock)
-#define GST_RTP_BIN_UNLOCK(bin) g_mutex_unlock ((bin)->priv->bin_lock)
+#define GST_RTP_BIN_LOCK(bin) g_mutex_lock (&(bin)->priv->bin_lock)
+#define GST_RTP_BIN_UNLOCK(bin) g_mutex_unlock (&(bin)->priv->bin_lock)
/* lock to protect dynamic callbacks, like pad-added and new ssrc. */
-#define GST_RTP_BIN_DYN_LOCK(bin) g_mutex_lock ((bin)->priv->dyn_lock)
-#define GST_RTP_BIN_DYN_UNLOCK(bin) g_mutex_unlock ((bin)->priv->dyn_lock)
+#define GST_RTP_BIN_DYN_LOCK(bin) g_mutex_lock (&(bin)->priv->dyn_lock)
+#define GST_RTP_BIN_DYN_UNLOCK(bin) g_mutex_unlock (&(bin)->priv->dyn_lock)
/* lock for shutdown */
#define GST_RTP_BIN_SHUTDOWN_LOCK(bin,label) \
struct _GstRtpBinPrivate
{
- GMutex *bin_lock;
+ GMutex bin_lock;
/* lock protecting dynamic adding/removing */
- GMutex *dyn_lock;
+ GMutex dyn_lock;
/* if we are shutting down or not */
gint shutdown;
gint64 clock_base;
};
-#define GST_RTP_SESSION_LOCK(sess) g_mutex_lock ((sess)->lock)
-#define GST_RTP_SESSION_UNLOCK(sess) g_mutex_unlock ((sess)->lock)
+#define GST_RTP_SESSION_LOCK(sess) g_mutex_lock (&(sess)->lock)
+#define GST_RTP_SESSION_UNLOCK(sess) g_mutex_unlock (&(sess)->lock)
/* Manages the receiving end of the packets.
*
gulong demux_newpad_sig;
gulong demux_padremoved_sig;
- GMutex *lock;
+ GMutex lock;
/* list of GstRtpBinStream */
GSList *streams;
GstElement *session, *demux;
GstState target;
- if (!(session = gst_element_factory_make ("gstrtpsession", NULL)))
+ if (!(session = gst_element_factory_make ("rtpsession", NULL)))
goto no_session;
- if (!(demux = gst_element_factory_make ("gstrtpssrcdemux", NULL)))
+ if (!(demux = gst_element_factory_make ("rtpssrcdemux", NULL)))
goto no_demux;
sess = g_new0 (GstRtpBinSession, 1);
- sess->lock = g_mutex_new ();
+ g_mutex_init (&sess->lock);
sess->id = id;
sess->bin = rtpbin;
sess->session = session;
/* ERRORS */
no_session:
{
- g_warning ("gstrtpbin: could not create gstrtpsession element");
+ g_warning ("rtpbin: could not create gstrtpsession element");
return NULL;
}
no_demux:
{
gst_object_unref (session);
- g_warning ("gstrtpbin: could not create gstrtpssrcdemux element");
+ g_warning ("rtpbin: could not create gstrtpssrcdemux element");
return NULL;
}
}
g_slist_foreach (sess->streams, (GFunc) free_stream, NULL);
g_slist_free (sess->streams);
- g_mutex_free (sess->lock);
+ g_mutex_clear (&sess->lock);
g_hash_table_destroy (sess->ptmap);
g_free (sess);
guint64 clock_base;
guint64 extrtptime;
GstBuffer *buffer;
+ GstRTCPBuffer rtcp = { NULL, };
bin = stream->bin;
have_sr = FALSE;
have_sdes = FALSE;
- GST_RTCP_BUFFER_FOR_PACKETS (more, buffer, &packet) {
+
+ gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp);
+
+ GST_RTCP_BUFFER_FOR_PACKETS (more, &rtcp, &packet) {
/* first packet must be SR or RR or else the validate would have failed */
switch (gst_rtcp_packet_get_type (&packet)) {
case GST_RTCP_TYPE_SR:
break;
}
}
+ gst_rtcp_buffer_unmap (&rtcp);
}
/* create a new stream with @ssrc in @session. Must be called with
rtpbin = session->bin;
- if (!(buffer = gst_element_factory_make ("gstrtpjitterbuffer", NULL)))
+ if (!(buffer = gst_element_factory_make ("rtpjitterbuffer", NULL)))
goto no_jitterbuffer;
if (!rtpbin->ignore_pt)
- if (!(demux = gst_element_factory_make ("gstrtpptdemux", NULL)))
+ if (!(demux = gst_element_factory_make ("rtpptdemux", NULL)))
goto no_demux;
/* ERRORS */
no_jitterbuffer:
{
- g_warning ("gstrtpbin: could not create gstrtpjitterbuffer element");
+ g_warning ("rtpbin: could not create gstrtpjitterbuffer element");
return NULL;
}
no_demux:
{
gst_object_unref (buffer);
- g_warning ("gstrtpbin: could not create gstrtpptdemux element");
+ g_warning ("rtpbin: could not create gstrtpptdemux element");
return NULL;
}
}
static GstStateChangeReturn gst_rtp_bin_change_state (GstElement * element,
GstStateChange transition);
static GstPad *gst_rtp_bin_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_rtp_bin_release_pad (GstElement * element, GstPad * pad);
static void gst_rtp_bin_handle_message (GstBin * bin, GstMessage * message);
-GST_BOILERPLATE (GstRtpBin, gst_rtp_bin, GstBin, GST_TYPE_BIN);
-
-static void
-gst_rtp_bin_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- /* sink pads */
- gst_element_class_add_static_pad_template (element_class,
- &rtpbin_recv_rtp_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &rtpbin_recv_rtcp_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &rtpbin_send_rtp_sink_template);
-
- /* src pads */
- gst_element_class_add_static_pad_template (element_class,
- &rtpbin_recv_rtp_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &rtpbin_send_rtcp_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &rtpbin_send_rtp_src_template);
-
- gst_element_class_set_details_simple (element_class, "RTP Bin",
- "Filter/Network/RTP",
- "Real-Time Transport Protocol bin",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_bin_parent_class parent_class
+G_DEFINE_TYPE (GstRtpBin, gst_rtp_bin, GST_TYPE_BIN);
static void
gst_rtp_bin_class_init (GstRtpBinClass * klass)
GST_DEBUG_FUNCPTR (gst_rtp_bin_request_new_pad);
gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_rtp_bin_release_pad);
+ /* sink pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_recv_rtp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_recv_rtcp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_send_rtp_sink_template));
+
+ /* src pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_recv_rtp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_send_rtcp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_send_rtp_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP Bin",
+ "Filter/Network/RTP",
+ "Real-Time Transport Protocol bin",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbin_class->handle_message = GST_DEBUG_FUNCPTR (gst_rtp_bin_handle_message);
klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_bin_clear_pt_map);
}
static void
-gst_rtp_bin_init (GstRtpBin * rtpbin, GstRtpBinClass * klass)
+gst_rtp_bin_init (GstRtpBin * rtpbin)
{
gchar *cname;
rtpbin->priv = GST_RTP_BIN_GET_PRIVATE (rtpbin);
- rtpbin->priv->bin_lock = g_mutex_new ();
- rtpbin->priv->dyn_lock = g_mutex_new ();
+ g_mutex_init (&rtpbin->priv->bin_lock);
+ g_mutex_init (&rtpbin->priv->dyn_lock);
rtpbin->latency_ms = DEFAULT_LATENCY_MS;
rtpbin->latency_ns = DEFAULT_LATENCY_MS * GST_MSECOND;
if (rtpbin->sdes)
gst_structure_free (rtpbin->sdes);
- g_mutex_free (rtpbin->priv->bin_lock);
- g_mutex_free (rtpbin->priv->dyn_lock);
+ g_mutex_clear (&rtpbin->priv->bin_lock);
+ g_mutex_clear (&rtpbin->priv->dyn_lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* ghost the pad to the parent */
klass = GST_ELEMENT_GET_CLASS (rtpbin);
- templ = gst_element_class_get_pad_template (klass, "recv_rtp_src_%d_%d_%d");
- padname = g_strdup_printf ("recv_rtp_src_%d_%u_%d",
+ templ = gst_element_class_get_pad_template (klass, "recv_rtp_src_%u_%u_%u");
+ padname = g_strdup_printf ("recv_rtp_src_%u_%u_%u",
stream->session->id, stream->ssrc, pt);
gpad = gst_ghost_pad_new_from_template (padname, pad, templ);
g_free (padname);
g_object_set_data (G_OBJECT (pad), "GstRTPBin.ghostpad", gpad);
- gst_pad_set_caps (gpad, GST_PAD_CAPS (pad));
gst_pad_set_active (gpad, TRUE);
GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
/* get pad and link */
GST_DEBUG_OBJECT (rtpbin, "linking jitterbuffer RTP");
- padname = g_strdup_printf ("src_%d", ssrc);
+ padname = g_strdup_printf ("src_%u", ssrc);
srcpad = gst_element_get_static_pad (element, padname);
g_free (padname);
sinkpad = gst_element_get_static_pad (stream->buffer, "sink");
gst_object_unref (srcpad);
GST_DEBUG_OBJECT (rtpbin, "linking jitterbuffer RTCP");
- padname = g_strdup_printf ("rtcp_src_%d", ssrc);
+ padname = g_strdup_printf ("rtcp_src_%u", ssrc);
srcpad = gst_element_get_static_pad (element, padname);
g_free (padname);
sinkpad = gst_element_get_request_pad (stream->buffer, "sink_rtcp");
/* ghost the pad to the parent */
klass = GST_ELEMENT_GET_CLASS (rtpbin);
- templ = gst_element_class_get_pad_template (klass, "recv_rtp_src_%d_%d_%d");
- padname = g_strdup_printf ("recv_rtp_src_%d_%u_%d",
+ templ = gst_element_class_get_pad_template (klass, "recv_rtp_src_%u_%u_%u");
+ padname = g_strdup_printf ("recv_rtp_src_%u_%u_%u",
stream->session->id, stream->ssrc, 255);
gpad = gst_ghost_pad_new_from_template (padname, pad, templ);
g_free (padname);
- gst_pad_set_caps (gpad, GST_PAD_CAPS (pad));
gst_pad_set_active (gpad, TRUE);
gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), gpad);
GstPadLinkReturn lres;
/* first get the session number */
- if (name == NULL || sscanf (name, "recv_rtp_sink_%d", &sessid) != 1)
+ if (name == NULL || sscanf (name, "recv_rtp_sink_%u", &sessid) != 1)
goto no_name;
GST_DEBUG_OBJECT (rtpbin, "finding session %d", sessid);
/* ERRORS */
no_name:
{
- g_warning ("gstrtpbin: invalid name given");
+ g_warning ("rtpbin: invalid name given");
return NULL;
}
create_error:
}
pad_failed:
{
- g_warning ("gstrtpbin: failed to get session pad");
+ g_warning ("rtpbin: failed to get session pad");
return NULL;
}
link_failed:
{
- g_warning ("gstrtpbin: failed to link pads");
+ g_warning ("rtpbin: failed to link pads");
return NULL;
}
}
GstPadLinkReturn lres;
/* first get the session number */
- if (name == NULL || sscanf (name, "recv_rtcp_sink_%d", &sessid) != 1)
+ if (name == NULL || sscanf (name, "recv_rtcp_sink_%u", &sessid) != 1)
goto no_name;
GST_DEBUG_OBJECT (rtpbin, "finding session %d", sessid);
/* ERRORS */
no_name:
{
- g_warning ("gstrtpbin: invalid name given");
+ g_warning ("rtpbin: invalid name given");
return NULL;
}
create_error:
}
pad_failed:
{
- g_warning ("gstrtpbin: failed to get session pad");
+ g_warning ("rtpbin: failed to get session pad");
return NULL;
}
link_failed:
{
- g_warning ("gstrtpbin: failed to link pads");
+ g_warning ("rtpbin: failed to link pads");
return NULL;
}
}
GstElementClass *klass;
/* first get the session number */
- if (name == NULL || sscanf (name, "send_rtp_sink_%d", &sessid) != 1)
+ if (name == NULL || sscanf (name, "send_rtp_sink_%u", &sessid) != 1)
goto no_name;
/* get or create session */
/* ghost the new source pad */
klass = GST_ELEMENT_GET_CLASS (rtpbin);
- gname = g_strdup_printf ("send_rtp_src_%d", sessid);
- templ = gst_element_class_get_pad_template (klass, "send_rtp_src_%d");
+ gname = g_strdup_printf ("send_rtp_src_%u", sessid);
+ templ = gst_element_class_get_pad_template (klass, "send_rtp_src_%u");
session->send_rtp_src_ghost =
gst_ghost_pad_new_from_template (gname, session->send_rtp_src, templ);
gst_pad_set_active (session->send_rtp_src_ghost, TRUE);
/* ERRORS */
no_name:
{
- g_warning ("gstrtpbin: invalid name given");
+ g_warning ("rtpbin: invalid name given");
return NULL;
}
create_error:
}
pad_failed:
{
- g_warning ("gstrtpbin: failed to get session pad for session %d", sessid);
+ g_warning ("rtpbin: failed to get session pad for session %d", sessid);
return NULL;
}
no_srcpad:
{
- g_warning ("gstrtpbin: failed to get rtp source pad for session %d",
- sessid);
+ g_warning ("rtpbin: failed to get rtp source pad for session %d", sessid);
return NULL;
}
}
GstRtpBinSession *session;
/* first get the session number */
- if (name == NULL || sscanf (name, "send_rtcp_src_%d", &sessid) != 1)
+ if (name == NULL || sscanf (name, "send_rtcp_src_%u", &sessid) != 1)
goto no_name;
/* get or create session */
/* ERRORS */
no_name:
{
- g_warning ("gstrtpbin: invalid name given");
+ g_warning ("rtpbin: invalid name given");
return NULL;
}
no_session:
{
- g_warning ("gstrtpbin: session with id %d does not exist", sessid);
+ g_warning ("rtpbin: session with id %d does not exist", sessid);
return NULL;
}
pad_failed:
{
- g_warning ("gstrtpbin: failed to get rtcp pad for session %d", sessid);
+ g_warning ("rtpbin: failed to get rtcp pad for session %d", sessid);
return NULL;
}
}
gint session = 0;
GstIterator *pad_it = NULL;
gchar *pad_name = NULL;
+ GValue data = { 0, };
GST_DEBUG_OBJECT (element, "find a free pad name for template");
while (!name_found) {
gboolean done = FALSE;
+
g_free (pad_name);
pad_name = g_strdup_printf (templ->name_template, session++);
pad_it = gst_element_iterate_pads (GST_ELEMENT (element));
name_found = TRUE;
while (!done) {
- gpointer data;
-
switch (gst_iterator_next (pad_it, &data)) {
case GST_ITERATOR_OK:
{
GstPad *pad;
gchar *name;
- pad = GST_PAD_CAST (data);
+ pad = g_value_get_object (&data);
name = gst_pad_get_name (pad);
if (strcmp (name, pad_name) == 0) {
name_found = FALSE;
}
g_free (name);
- gst_object_unref (pad);
+ g_value_reset (&data);
break;
}
case GST_ITERATOR_ERROR:
break;
}
}
+ g_value_unset (&data);
gst_iterator_free (pad_it);
}
*/
static GstPad *
gst_rtp_bin_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name)
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
{
GstRtpBin *rtpbin;
GstElementClass *klass;
GST_DEBUG_OBJECT (rtpbin, "Trying to request a pad with name %s", pad_name);
/* figure out the template */
- if (templ == gst_element_class_get_pad_template (klass, "recv_rtp_sink_%d")) {
+ if (templ == gst_element_class_get_pad_template (klass, "recv_rtp_sink_%u")) {
result = create_recv_rtp (rtpbin, templ, pad_name);
} else if (templ == gst_element_class_get_pad_template (klass,
- "recv_rtcp_sink_%d")) {
+ "recv_rtcp_sink_%u")) {
result = create_recv_rtcp (rtpbin, templ, pad_name);
} else if (templ == gst_element_class_get_pad_template (klass,
- "send_rtp_sink_%d")) {
+ "send_rtp_sink_%u")) {
result = create_send_rtp (rtpbin, templ, pad_name);
} else if (templ == gst_element_class_get_pad_template (klass,
- "send_rtcp_src_%d")) {
+ "send_rtcp_src_%u")) {
result = create_rtcp (rtpbin, templ, pad_name);
} else
goto wrong_template;
{
g_free (pad_name);
GST_RTP_BIN_UNLOCK (rtpbin);
- g_warning ("gstrtpbin: this is not our template");
+ g_warning ("rtpbin: this is not our template");
return NULL;
}
}
unknown_pad:
{
GST_RTP_BIN_UNLOCK (rtpbin);
- g_warning ("gstrtpbin: %s:%s is not one of our request pads",
+ g_warning ("rtpbin: %s:%s is not one of our request pads",
GST_DEBUG_PAD_NAME (pad));
return;
}
PROP_LAST
};
-#define JBUF_LOCK(priv) (g_mutex_lock ((priv)->jbuf_lock))
+#define JBUF_LOCK(priv) (g_mutex_lock (&(priv)->jbuf_lock))
#define JBUF_LOCK_CHECK(priv,label) G_STMT_START { \
JBUF_LOCK (priv); \
goto label; \
} G_STMT_END
-#define JBUF_UNLOCK(priv) (g_mutex_unlock ((priv)->jbuf_lock))
-#define JBUF_WAIT(priv) (g_cond_wait ((priv)->jbuf_cond, (priv)->jbuf_lock))
+#define JBUF_UNLOCK(priv) (g_mutex_unlock (&(priv)->jbuf_lock))
+#define JBUF_WAIT(priv) (g_cond_wait (&(priv)->jbuf_cond, &(priv)->jbuf_lock))
#define JBUF_WAIT_CHECK(priv,label) G_STMT_START { \
JBUF_WAIT(priv); \
goto label; \
} G_STMT_END
-#define JBUF_SIGNAL(priv) (g_cond_signal ((priv)->jbuf_cond))
+#define JBUF_SIGNAL(priv) (g_cond_signal (&(priv)->jbuf_cond))
struct _GstRtpJitterBufferPrivate
{
GstPad *rtcpsinkpad;
RTPJitterBuffer *jbuf;
- GMutex *jbuf_lock;
- GCond *jbuf_cond;
+ GMutex jbuf_lock;
+ GCond jbuf_cond;
gboolean waiting;
gboolean discont;
gboolean active;
static guint gst_rtp_jitter_buffer_signals[LAST_SIGNAL] = { 0 };
-GST_BOILERPLATE (GstRtpJitterBuffer, gst_rtp_jitter_buffer, GstElement,
- GST_TYPE_ELEMENT);
+#define gst_rtp_jitter_buffer_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJitterBuffer, gst_rtp_jitter_buffer, GST_TYPE_ELEMENT);
/* object overrides */
static void gst_rtp_jitter_buffer_set_property (GObject * object,
static GstStateChangeReturn gst_rtp_jitter_buffer_change_state (GstElement
* element, GstStateChange transition);
static GstPad *gst_rtp_jitter_buffer_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * filter);
static void gst_rtp_jitter_buffer_release_pad (GstElement * element,
GstPad * pad);
static GstClock *gst_rtp_jitter_buffer_provide_clock (GstElement * element);
/* pad overrides */
-static GstCaps *gst_rtp_jitter_buffer_getcaps (GstPad * pad);
-static GstIterator *gst_rtp_jitter_buffer_iterate_internal_links (GstPad * pad);
+static GstCaps *gst_rtp_jitter_buffer_getcaps (GstPad * pad, GstCaps * filter);
+static GstIterator *gst_rtp_jitter_buffer_iterate_internal_links (GstPad * pad,
+ GstObject * parent);
/* sinkpad overrides */
-static gboolean gst_jitter_buffer_sink_setcaps (GstPad * pad, GstCaps * caps);
static gboolean gst_rtp_jitter_buffer_sink_event (GstPad * pad,
- GstEvent * event);
+ GstObject * parent, GstEvent * event);
static GstFlowReturn gst_rtp_jitter_buffer_chain (GstPad * pad,
- GstBuffer * buffer);
+ GstObject * parent, GstBuffer * buffer);
static gboolean gst_rtp_jitter_buffer_sink_rtcp_event (GstPad * pad,
- GstEvent * event);
+ GstObject * parent, GstEvent * event);
static GstFlowReturn gst_rtp_jitter_buffer_chain_rtcp (GstPad * pad,
- GstBuffer * buffer);
+ GstObject * parent, GstBuffer * buffer);
+
+static gboolean gst_rtp_jitter_buffer_sink_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
/* srcpad overrides */
static gboolean gst_rtp_jitter_buffer_src_event (GstPad * pad,
- GstEvent * event);
-static gboolean
-gst_rtp_jitter_buffer_src_activate_push (GstPad * pad, gboolean active);
+ GstObject * parent, GstEvent * event);
+static gboolean gst_rtp_jitter_buffer_src_activate_mode (GstPad * pad,
+ GstObject * parent, GstPadMode mode, gboolean active);
static void gst_rtp_jitter_buffer_loop (GstRtpJitterBuffer * jitterbuffer);
-static gboolean gst_rtp_jitter_buffer_query (GstPad * pad, GstQuery * query);
+static gboolean gst_rtp_jitter_buffer_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
static void
gst_rtp_jitter_buffer_clear_pt_map (GstRtpJitterBuffer * jitterbuffer);
gboolean active, guint64 base_time);
static void
-gst_rtp_jitter_buffer_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_jitter_buffer_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_jitter_buffer_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_jitter_buffer_sink_rtcp_template);
-
- gst_element_class_set_details_simple (element_class,
- "RTP packet jitter-buffer", "Filter/Network/RTP",
- "A buffer that deals with network jitter and other transmission faults",
- "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_jitter_buffer_class_init (GstRtpJitterBufferClass * klass)
{
GObjectClass *gobject_class;
gstelement_class->provide_clock =
GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_provide_clock);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jitter_buffer_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jitter_buffer_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jitter_buffer_sink_rtcp_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP packet jitter-buffer", "Filter/Network/RTP",
+ "A buffer that deals with network jitter and other transmission faults",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
+ "Wim Taymans <wim.taymans@gmail.com>");
+
klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_clear_pt_map);
klass->set_active = GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_set_active);
}
static void
-gst_rtp_jitter_buffer_init (GstRtpJitterBuffer * jitterbuffer,
- GstRtpJitterBufferClass * klass)
+gst_rtp_jitter_buffer_init (GstRtpJitterBuffer * jitterbuffer)
{
GstRtpJitterBufferPrivate *priv;
priv->do_lost = DEFAULT_DO_LOST;
priv->jbuf = rtp_jitter_buffer_new ();
- priv->jbuf_lock = g_mutex_new ();
- priv->jbuf_cond = g_cond_new ();
+ g_mutex_init (&priv->jbuf_lock);
+ g_cond_init (&priv->jbuf_cond);
/* reset skew detection initialy */
rtp_jitter_buffer_reset_skew (priv->jbuf);
gst_pad_new_from_static_template (&gst_rtp_jitter_buffer_src_template,
"src");
- gst_pad_set_activatepush_function (priv->srcpad,
- GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_src_activate_push));
+ gst_pad_set_activatemode_function (priv->srcpad,
+ GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_src_activate_mode));
gst_pad_set_query_function (priv->srcpad,
- GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_query));
- gst_pad_set_getcaps_function (priv->srcpad,
- GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_getcaps));
+ GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_src_query));
gst_pad_set_event_function (priv->srcpad,
GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_src_event));
GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_chain));
gst_pad_set_event_function (priv->sinkpad,
GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_sink_event));
- gst_pad_set_setcaps_function (priv->sinkpad,
- GST_DEBUG_FUNCPTR (gst_jitter_buffer_sink_setcaps));
- gst_pad_set_getcaps_function (priv->sinkpad,
- GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_getcaps));
+ gst_pad_set_query_function (priv->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_sink_query));
gst_element_add_pad (GST_ELEMENT (jitterbuffer), priv->srcpad);
gst_element_add_pad (GST_ELEMENT (jitterbuffer), priv->sinkpad);
+
+ GST_OBJECT_FLAG_SET (jitterbuffer, GST_ELEMENT_FLAG_PROVIDE_CLOCK);
}
static void
jitterbuffer = GST_RTP_JITTER_BUFFER (object);
- g_mutex_free (jitterbuffer->priv->jbuf_lock);
- g_cond_free (jitterbuffer->priv->jbuf_cond);
+ g_mutex_clear (&jitterbuffer->priv->jbuf_lock);
+ g_cond_clear (&jitterbuffer->priv->jbuf_cond);
g_object_unref (jitterbuffer->priv->jbuf);
}
static GstIterator *
-gst_rtp_jitter_buffer_iterate_internal_links (GstPad * pad)
+gst_rtp_jitter_buffer_iterate_internal_links (GstPad * pad, GstObject * parent)
{
GstRtpJitterBuffer *jitterbuffer;
GstPad *otherpad = NULL;
GstIterator *it;
+ GValue val = { 0, };
- jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
if (pad == jitterbuffer->priv->sinkpad) {
otherpad = jitterbuffer->priv->srcpad;
otherpad = NULL;
}
- it = gst_iterator_new_single (GST_TYPE_PAD, otherpad,
- (GstCopyFunction) gst_object_ref, (GFreeFunc) gst_object_unref);
-
- gst_object_unref (jitterbuffer);
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_set_object (&val, otherpad);
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
return it;
}
static GstPad *
gst_rtp_jitter_buffer_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name)
+ GstPadTemplate * templ, const gchar * name, const GstCaps * filter)
{
GstRtpJitterBuffer *jitterbuffer;
GstElementClass *klass;
}
static GstCaps *
-gst_rtp_jitter_buffer_getcaps (GstPad * pad)
+gst_rtp_jitter_buffer_getcaps (GstPad * pad, GstCaps * filter)
{
GstRtpJitterBuffer *jitterbuffer;
GstRtpJitterBufferPrivate *priv;
GstPad *other;
GstCaps *caps;
- const GstCaps *templ;
+ GstCaps *templ;
jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
priv = jitterbuffer->priv;
other = (pad == priv->srcpad ? priv->sinkpad : priv->srcpad);
- caps = gst_pad_peer_get_caps (other);
+ caps = gst_pad_peer_query_caps (other, filter);
templ = gst_pad_get_pad_template_caps (pad);
if (caps == NULL) {
- GST_DEBUG_OBJECT (jitterbuffer, "copy template");
- caps = gst_caps_copy (templ);
+ GST_DEBUG_OBJECT (jitterbuffer, "use template");
+ caps = templ;
} else {
GstCaps *intersect;
intersect = gst_caps_intersect (caps, templ);
gst_caps_unref (caps);
+ gst_caps_unref (templ);
caps = intersect;
}
}
}
-static gboolean
-gst_jitter_buffer_sink_setcaps (GstPad * pad, GstCaps * caps)
-{
- GstRtpJitterBuffer *jitterbuffer;
- GstRtpJitterBufferPrivate *priv;
- gboolean res;
-
- jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
- priv = jitterbuffer->priv;
-
- JBUF_LOCK (priv);
- res = gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps);
- JBUF_UNLOCK (priv);
-
- /* set same caps on srcpad on success */
- if (res)
- gst_pad_set_caps (priv->srcpad, caps);
-
- gst_object_unref (jitterbuffer);
-
- return res;
-}
-
static void
gst_rtp_jitter_buffer_flush_start (GstRtpJitterBuffer * jitterbuffer)
{
JBUF_LOCK (priv);
/* mark ourselves as flushing */
- priv->srcresult = GST_FLOW_WRONG_STATE;
+ priv->srcresult = GST_FLOW_FLUSHING;
GST_DEBUG_OBJECT (jitterbuffer, "Disabling pop on queue");
/* this unblocks any waiting pops on the src pad task */
JBUF_SIGNAL (priv);
}
static gboolean
-gst_rtp_jitter_buffer_src_activate_push (GstPad * pad, gboolean active)
+gst_rtp_jitter_buffer_src_activate_mode (GstPad * pad, GstObject * parent,
+ GstPadMode mode, gboolean active)
{
- gboolean result = TRUE;
+ gboolean result;
GstRtpJitterBuffer *jitterbuffer = NULL;
- jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
- if (active) {
- /* allow data processing */
- gst_rtp_jitter_buffer_flush_stop (jitterbuffer);
+ switch (mode) {
+ case GST_PAD_MODE_PUSH:
+ if (active) {
+ /* allow data processing */
+ gst_rtp_jitter_buffer_flush_stop (jitterbuffer);
- /* start pushing out buffers */
- GST_DEBUG_OBJECT (jitterbuffer, "Starting task on srcpad");
- gst_pad_start_task (jitterbuffer->priv->srcpad,
- (GstTaskFunction) gst_rtp_jitter_buffer_loop, jitterbuffer);
- } else {
- /* make sure all data processing stops ASAP */
- gst_rtp_jitter_buffer_flush_start (jitterbuffer);
+ /* start pushing out buffers */
+ GST_DEBUG_OBJECT (jitterbuffer, "Starting task on srcpad");
+ result = gst_pad_start_task (jitterbuffer->priv->srcpad,
+ (GstTaskFunction) gst_rtp_jitter_buffer_loop, jitterbuffer);
+ } else {
+ /* make sure all data processing stops ASAP */
+ gst_rtp_jitter_buffer_flush_start (jitterbuffer);
- /* NOTE this will hardlock if the state change is called from the src pad
- * task thread because we will _join() the thread. */
- GST_DEBUG_OBJECT (jitterbuffer, "Stopping task on srcpad");
- result = gst_pad_stop_task (pad);
+ /* NOTE this will hardlock if the state change is called from the src pad
+ * task thread because we will _join() the thread. */
+ GST_DEBUG_OBJECT (jitterbuffer, "Stopping task on srcpad");
+ result = gst_pad_stop_task (pad);
+ }
+ break;
+ default:
+ result = FALSE;
+ break;
}
-
- gst_object_unref (jitterbuffer);
-
return result;
}
}
static gboolean
-gst_rtp_jitter_buffer_src_event (GstPad * pad, GstEvent * event)
+gst_rtp_jitter_buffer_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
gboolean ret = TRUE;
GstRtpJitterBuffer *jitterbuffer;
GstRtpJitterBufferPrivate *priv;
- jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
- if (G_UNLIKELY (jitterbuffer == NULL)) {
- gst_event_unref (event);
- return FALSE;
- }
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
priv = jitterbuffer->priv;
GST_DEBUG_OBJECT (jitterbuffer, "received %s", GST_EVENT_TYPE_NAME (event));
ret = gst_pad_push_event (priv->sinkpad, event);
break;
}
- gst_object_unref (jitterbuffer);
return ret;
}
static gboolean
-gst_rtp_jitter_buffer_sink_event (GstPad * pad, GstEvent * event)
+gst_rtp_jitter_buffer_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
gboolean ret = TRUE;
GstRtpJitterBuffer *jitterbuffer;
GstRtpJitterBufferPrivate *priv;
- jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
- if (G_UNLIKELY (jitterbuffer == NULL)) {
- gst_event_unref (event);
- return FALSE;
- }
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
priv = jitterbuffer->priv;
GST_DEBUG_OBJECT (jitterbuffer, "received %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_CAPS:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time;
- gboolean update;
+ GstCaps *caps;
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
+ gst_event_parse_caps (event, &caps);
+
+ JBUF_LOCK (priv);
+ ret = gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps);
+ JBUF_UNLOCK (priv);
+
+ /* set same caps on srcpad on success */
+ if (ret)
+ ret = gst_pad_push_event (priv->srcpad, event);
+ else
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ gst_event_copy_segment (event, &priv->segment);
/* we need time for now */
- if (format != GST_FORMAT_TIME)
+ if (priv->segment.format != GST_FORMAT_TIME)
goto newseg_wrong_format;
GST_DEBUG_OBJECT (jitterbuffer,
- "newsegment: update %d, rate %g, arate %g, start %" GST_TIME_FORMAT
- ", stop %" GST_TIME_FORMAT ", time %" GST_TIME_FORMAT,
- update, rate, arate, GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
- GST_TIME_ARGS (time));
-
- /* now configure the values, we need these to time the release of the
- * buffers on the srcpad. */
- gst_segment_set_newsegment_full (&priv->segment, update,
- rate, arate, format, start, stop, time);
+ "newsegment: %" GST_SEGMENT_FORMAT, &priv->segment);
/* FIXME, push SEGMENT in the queue. Sorting order might be difficult. */
ret = gst_pad_push_event (priv->srcpad, event);
break;
case GST_EVENT_FLUSH_STOP:
ret = gst_pad_push_event (priv->srcpad, event);
- ret = gst_rtp_jitter_buffer_src_activate_push (priv->srcpad, TRUE);
+ ret =
+ gst_rtp_jitter_buffer_src_activate_mode (priv->srcpad, parent,
+ GST_PAD_MODE_PUSH, TRUE);
break;
case GST_EVENT_EOS:
{
}
done:
- gst_object_unref (jitterbuffer);
return ret;
}
static gboolean
-gst_rtp_jitter_buffer_sink_rtcp_event (GstPad * pad, GstEvent * event)
+gst_rtp_jitter_buffer_sink_rtcp_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
+ gboolean ret = TRUE;
GstRtpJitterBuffer *jitterbuffer;
- jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
GST_DEBUG_OBJECT (jitterbuffer, "received %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
+ gst_event_unref (event);
break;
case GST_EVENT_FLUSH_STOP:
+ gst_event_unref (event);
break;
default:
+ ret = gst_pad_event_default (pad, parent, event);
break;
}
- gst_event_unref (event);
- gst_object_unref (jitterbuffer);
- return TRUE;
+ return ret;
}
/*
* Must be called with JBUF_LOCK held, will release the LOCK when emiting the
* signal. The function returns GST_FLOW_ERROR when a parsing error happened and
- * GST_FLOW_WRONG_STATE when the element is shutting down. On success
+ * GST_FLOW_FLUSHING when the element is shutting down. On success
* GST_FLOW_OK is returned.
*/
static GstFlowReturn
out_flushing:
{
GST_DEBUG_OBJECT (jitterbuffer, "we are flushing");
- return GST_FLOW_WRONG_STATE;
+ return GST_FLOW_FLUSHING;
}
parse_failed:
{
}
static GstFlowReturn
-gst_rtp_jitter_buffer_chain (GstPad * pad, GstBuffer * buffer)
+gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
{
GstRtpJitterBuffer *jitterbuffer;
GstRtpJitterBufferPrivate *priv;
gboolean tail;
gint percent = -1;
guint8 pt;
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
- jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
if (G_UNLIKELY (!gst_rtp_buffer_validate (buffer)))
goto invalid_buffer;
priv = jitterbuffer->priv;
- pt = gst_rtp_buffer_get_payload_type (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
+ seqnum = gst_rtp_buffer_get_seq (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
/* take the timestamp of the buffer. This is the time when the packet was
* received and is used to calculate jitter and clock skew. We will adjust
timestamp = gst_segment_to_running_time (&priv->segment, GST_FORMAT_TIME,
timestamp);
- seqnum = gst_rtp_buffer_get_seq (buffer);
-
GST_DEBUG_OBJECT (jitterbuffer,
"Received packet #%d at time %" GST_TIME_FORMAT, seqnum,
GST_TIME_ARGS (timestamp));
priv->last_pt = pt;
/* reset clock-rate so that we get a new one */
priv->clock_rate = -1;
+
/* Try to get the clock-rate from the caps first if we can. If there are no
* caps we must fire the signal to get the clock-rate. */
- if ((caps = GST_BUFFER_CAPS (buffer))) {
+ if ((caps = gst_pad_get_current_caps (pad))) {
gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps);
+ gst_caps_unref (caps);
}
}
if (G_UNLIKELY (priv->clock_rate == -1)) {
/* no clock rate given on the caps, try to get one with the signal */
if (gst_rtp_jitter_buffer_get_clock_rate (jitterbuffer,
- pt) == GST_FLOW_WRONG_STATE)
+ pt) == GST_FLOW_FLUSHING)
goto out_flushing;
if (G_UNLIKELY (priv->clock_rate == -1))
old_buf = rtp_jitter_buffer_pop (priv->jbuf, &percent);
- GST_DEBUG_OBJECT (jitterbuffer, "Queue full, dropping old packet #%d",
- gst_rtp_buffer_get_seq (old_buf));
+ GST_DEBUG_OBJECT (jitterbuffer, "Queue full, dropping old packet %p",
+ old_buf);
gst_buffer_unref (old_buf);
}
/* we need to make the metadata writable before pushing it in the jitterbuffer
* because the jitterbuffer will update the timestamp */
- buffer = gst_buffer_make_metadata_writable (buffer);
+ buffer = gst_buffer_make_writable (buffer);
/* now insert the packet into the queue in sorted order. This function returns
* FALSE if a packet with the same seqnum was already in the queue, meaning we
if (percent != -1)
post_buffering_percent (jitterbuffer, percent);
- gst_object_unref (jitterbuffer);
-
return ret;
/* ERRORS */
GST_ELEMENT_WARNING (jitterbuffer, STREAM, DECODE, (NULL),
("Received invalid RTP payload, dropping"));
gst_buffer_unref (buffer);
- gst_object_unref (jitterbuffer);
return GST_FLOW_OK;
}
no_clock_rate:
}
have_eos:
{
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
GST_WARNING_OBJECT (jitterbuffer, "we are EOS, refusing buffer");
gst_buffer_unref (buffer);
goto finished;
guint64 ext_time, elapsed;
guint32 rtp_time;
GstRtpJitterBufferPrivate *priv;
+ GstRTPBuffer rtp = { NULL, };
priv = jitterbuffer->priv;
- rtp_time = gst_rtp_buffer_get_timestamp (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_READ, &rtp);
+ rtp_time = gst_rtp_buffer_get_timestamp (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
GST_LOG_OBJECT (jitterbuffer, "rtp %" G_GUINT32_FORMAT ", ext %"
G_GUINT64_FORMAT, rtp_time, priv->ext_timestamp);
GstClockID id;
GstClockTime sync_time;
gint percent = -1;
+ GstRTPBuffer rtp = { NULL, };
priv = jitterbuffer->priv;
outbuf = rtp_jitter_buffer_peek (priv->jbuf);
/* get the seqnum and the next expected seqnum */
- seqnum = gst_rtp_buffer_get_seq (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_READ, &rtp);
+ seqnum = gst_rtp_buffer_get_seq (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
next_seqnum = priv->next_seqnum;
/* get the timestamp, this is already corrected for clock skew by the
{
/* store result, we are flushing now */
GST_DEBUG_OBJECT (jitterbuffer, "We are EOS, pushing EOS downstream");
- priv->srcresult = GST_FLOW_UNEXPECTED;
+ priv->srcresult = GST_FLOW_EOS;
gst_pad_pause_task (priv->srcpad);
JBUF_UNLOCK (priv);
gst_pad_push_event (priv->srcpad, gst_event_new_eos ());
}
static GstFlowReturn
-gst_rtp_jitter_buffer_chain_rtcp (GstPad * pad, GstBuffer * buffer)
+gst_rtp_jitter_buffer_chain_rtcp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
{
GstRtpJitterBuffer *jitterbuffer;
GstRtpJitterBufferPrivate *priv;
guint64 ext_rtptime, diff;
guint32 rtptime;
gboolean drop = FALSE;
+ GstRTCPBuffer rtcp = { NULL, };
guint64 clock_base;
- jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
if (G_UNLIKELY (!gst_rtcp_buffer_validate (buffer)))
goto invalid_buffer;
priv = jitterbuffer->priv;
- if (!gst_rtcp_buffer_get_first_packet (buffer, &packet))
- goto invalid_buffer;
+ gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp);
+
+ if (!gst_rtcp_buffer_get_first_packet (&rtcp, &packet))
+ goto empty_buffer;
/* first packet must be SR or RR or else the validate would have failed */
switch (gst_rtcp_packet_get_type (&packet)) {
default:
goto ignore_buffer;
}
+ gst_rtcp_buffer_unmap (&rtcp);
GST_DEBUG_OBJECT (jitterbuffer, "received RTCP of SSRC %08x", ssrc);
done:
gst_buffer_unref (buffer);
- gst_object_unref (jitterbuffer);
return ret;
ret = GST_FLOW_OK;
goto done;
}
+empty_buffer:
+ {
+ /* this is not fatal but should be filtered earlier */
+ GST_ELEMENT_WARNING (jitterbuffer, STREAM, DECODE, (NULL),
+ ("Received empty RTCP payload, dropping"));
+ gst_rtcp_buffer_unmap (&rtcp);
+ ret = GST_FLOW_OK;
+ goto done;
+ }
ignore_buffer:
{
GST_DEBUG_OBJECT (jitterbuffer, "ignoring RTCP packet");
+ gst_rtcp_buffer_unmap (&rtcp);
ret = GST_FLOW_OK;
goto done;
}
}
static gboolean
-gst_rtp_jitter_buffer_query (GstPad * pad, GstQuery * query)
+gst_rtp_jitter_buffer_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_rtp_jitter_buffer_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return res;
+}
+
+static gboolean
+gst_rtp_jitter_buffer_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
GstRtpJitterBuffer *jitterbuffer;
GstRtpJitterBufferPrivate *priv;
gboolean res = FALSE;
- jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
- if (G_UNLIKELY (jitterbuffer == NULL))
- return FALSE;
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
priv = jitterbuffer->priv;
switch (GST_QUERY_TYPE (query)) {
gst_query_parse_position (query, &fmt, NULL);
if (fmt != GST_FORMAT_TIME) {
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
gst_query_set_position (query, GST_FORMAT_TIME, start + last_out);
res = TRUE;
} else {
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
}
break;
}
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_rtp_jitter_buffer_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
- gst_object_unref (jitterbuffer);
-
return res;
}
static gboolean
plugin_init (GstPlugin * plugin)
{
- if (!gst_element_register (plugin, "gstrtpbin", GST_RANK_NONE,
- GST_TYPE_RTP_BIN))
+ if (!gst_element_register (plugin, "rtpbin", GST_RANK_NONE, GST_TYPE_RTP_BIN))
return FALSE;
- if (!gst_element_register (plugin, "gstrtpjitterbuffer", GST_RANK_NONE,
+ if (!gst_element_register (plugin, "rtpjitterbuffer", GST_RANK_NONE,
GST_TYPE_RTP_JITTER_BUFFER))
return FALSE;
- if (!gst_element_register (plugin, "gstrtpptdemux", GST_RANK_NONE,
+ if (!gst_element_register (plugin, "rtpptdemux", GST_RANK_NONE,
GST_TYPE_RTP_PT_DEMUX))
return FALSE;
- if (!gst_element_register (plugin, "gstrtpsession", GST_RANK_NONE,
+ if (!gst_element_register (plugin, "rtpsession", GST_RANK_NONE,
GST_TYPE_RTP_SESSION))
return FALSE;
- if (!gst_element_register (plugin, "gstrtpssrcdemux", GST_RANK_NONE,
+ if (!gst_element_register (plugin, "rtpssrcdemux", GST_RANK_NONE,
GST_TYPE_RTP_SSRC_DEMUX))
return FALSE;
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
- "gstrtpmanager",
+ "rtpmanager",
"RTP session management plugin library",
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
);
static GstStaticPadTemplate rtp_pt_demux_src_template =
-GST_STATIC_PAD_TEMPLATE ("src_%d",
+GST_STATIC_PAD_TEMPLATE ("src_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("application/x-rtp, " "payload = (int) [ 0, 255 ]")
LAST_SIGNAL
};
-GST_BOILERPLATE (GstRtpPtDemux, gst_rtp_pt_demux, GstElement, GST_TYPE_ELEMENT);
+#define gst_rtp_pt_demux_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPtDemux, gst_rtp_pt_demux, GST_TYPE_ELEMENT);
static void gst_rtp_pt_demux_finalize (GObject * object);
static void gst_rtp_pt_demux_release (GstRtpPtDemux * ptdemux);
static gboolean gst_rtp_pt_demux_setup (GstRtpPtDemux * ptdemux);
-static gboolean gst_rtp_pt_demux_sink_event (GstPad * pad, GstEvent * event);
-static GstFlowReturn gst_rtp_pt_demux_chain (GstPad * pad, GstBuffer * buf);
+static gboolean gst_rtp_pt_demux_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_rtp_pt_demux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
static GstStateChangeReturn gst_rtp_pt_demux_change_state (GstElement * element,
GstStateChange transition);
static void gst_rtp_pt_demux_clear_pt_map (GstRtpPtDemux * rtpdemux);
-static GstRtpPtDemuxPad *find_pad_for_pt (GstRtpPtDemux * rtpdemux, guint8 pt);
+static GstPad *find_pad_for_pt (GstRtpPtDemux * rtpdemux, guint8 pt);
-static gboolean gst_rtp_pt_demux_src_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_pt_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static guint gst_rtp_pt_demux_signals[LAST_SIGNAL] = { 0 };
static void
-gst_rtp_pt_demux_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_klass = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (gstelement_klass,
- &rtp_pt_demux_sink_template);
- gst_element_class_add_static_pad_template (gstelement_klass,
- &rtp_pt_demux_src_template);
-
- gst_element_class_set_details_simple (gstelement_klass, "RTP Demux",
- "Demux/Network/RTP",
- "Parses codec streams transmitted in the same RTP session",
- "Kai Vehmanen <kai.vehmanen@nokia.com>");
-}
-
-static void
gst_rtp_pt_demux_class_init (GstRtpPtDemuxClass * klass)
{
GObjectClass *gobject_klass;
klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_pt_demux_clear_pt_map);
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_pt_demux_sink_template));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_pt_demux_src_template));
+
+ gst_element_class_set_details_simple (gstelement_klass, "RTP Demux",
+ "Demux/Network/RTP",
+ "Parses codec streams transmitted in the same RTP session",
+ "Kai Vehmanen <kai.vehmanen@nokia.com>");
+
GST_DEBUG_CATEGORY_INIT (gst_rtp_pt_demux_debug,
"rtpptdemux", 0, "RTP codec demuxer");
}
static void
-gst_rtp_pt_demux_init (GstRtpPtDemux * ptdemux, GstRtpPtDemuxClass * g_class)
+gst_rtp_pt_demux_init (GstRtpPtDemux * ptdemux)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (ptdemux);
caps = g_value_dup_boxed (&ret);
g_value_unset (&ret);
if (caps == NULL) {
- caps = GST_PAD_CAPS (rtpdemux->sink);
- if (caps)
- gst_caps_ref (caps);
+ caps = gst_pad_get_current_caps (rtpdemux->sink);
}
GST_DEBUG ("pt %d, got caps %" GST_PTR_FORMAT, pt, caps);
GST_OBJECT_UNLOCK (rtpdemux);
}
+static gboolean
+need_caps_for_pt (GstRtpPtDemux * rtpdemux, guint8 pt)
+{
+ GSList *walk;
+ gboolean ret = FALSE;
+
+ GST_OBJECT_LOCK (rtpdemux);
+ for (walk = rtpdemux->srcpads; walk; walk = g_slist_next (walk)) {
+ GstRtpPtDemuxPad *pad = walk->data;
+
+ if (pad->pt == pt) {
+ ret = pad->newcaps;
+ }
+ }
+ GST_OBJECT_UNLOCK (rtpdemux);
+
+ return ret;
+}
+
+
+static void
+clear_newcaps_for_pt (GstRtpPtDemux * rtpdemux, guint8 pt)
+{
+ GSList *walk;
+
+ GST_OBJECT_LOCK (rtpdemux);
+ for (walk = rtpdemux->srcpads; walk; walk = g_slist_next (walk)) {
+ GstRtpPtDemuxPad *pad = walk->data;
+
+ if (pad->pt == pt) {
+ pad->newcaps = FALSE;
+ break;
+ }
+ }
+ GST_OBJECT_UNLOCK (rtpdemux);
+}
+
+static gboolean
+forward_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+{
+ GstPad *srcpad = GST_PAD_CAST (user_data);
+
+ gst_pad_push_event (srcpad, gst_event_ref (*event));
+
+ return TRUE;
+}
+
static GstFlowReturn
-gst_rtp_pt_demux_chain (GstPad * pad, GstBuffer * buf)
+gst_rtp_pt_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstFlowReturn ret = GST_FLOW_OK;
GstRtpPtDemux *rtpdemux;
- GstElement *element = GST_ELEMENT (GST_OBJECT_PARENT (pad));
guint8 pt;
GstPad *srcpad;
- GstRtpPtDemuxPad *rtpdemuxpad;
GstCaps *caps;
+ GstRTPBuffer rtp = { NULL };
- rtpdemux = GST_RTP_PT_DEMUX (GST_OBJECT_PARENT (pad));
+ rtpdemux = GST_RTP_PT_DEMUX (parent);
if (!gst_rtp_buffer_validate (buf))
goto invalid_buffer;
- pt = gst_rtp_buffer_get_payload_type (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
GST_DEBUG_OBJECT (rtpdemux, "received buffer for pt %d", pt);
- rtpdemuxpad = find_pad_for_pt (rtpdemux, pt);
- if (rtpdemuxpad == NULL) {
+ srcpad = find_pad_for_pt (rtpdemux, pt);
+ if (srcpad == NULL) {
/* new PT, create a src pad */
+ GstRtpPtDemuxPad *rtpdemuxpad;
GstElementClass *klass;
GstPadTemplate *templ;
gchar *padname;
goto no_caps;
klass = GST_ELEMENT_GET_CLASS (rtpdemux);
- templ = gst_element_class_get_pad_template (klass, "src_%d");
- padname = g_strdup_printf ("src_%d", pt);
+ templ = gst_element_class_get_pad_template (klass, "src_%u");
+ padname = g_strdup_printf ("src_%u", pt);
srcpad = gst_pad_new_from_template (templ, padname);
gst_pad_use_fixed_caps (srcpad);
g_free (padname);
gst_caps_unref (caps);
GST_DEBUG ("Adding pt=%d to the list.", pt);
- rtpdemuxpad = g_new0 (GstRtpPtDemuxPad, 1);
+ rtpdemuxpad = g_slice_new0 (GstRtpPtDemuxPad);
rtpdemuxpad->pt = pt;
rtpdemuxpad->newcaps = FALSE;
rtpdemuxpad->pad = srcpad;
+ gst_object_ref (srcpad);
GST_OBJECT_LOCK (rtpdemux);
rtpdemux->srcpads = g_slist_append (rtpdemux->srcpads, rtpdemuxpad);
GST_OBJECT_UNLOCK (rtpdemux);
gst_pad_set_active (srcpad, TRUE);
- gst_element_add_pad (element, srcpad);
+ gst_pad_sticky_events_foreach (rtpdemux->sink, forward_sticky_events,
+ srcpad);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpdemux), srcpad);
GST_DEBUG ("emitting new-payload-type for pt %d", pt);
g_signal_emit (G_OBJECT (rtpdemux),
gst_rtp_pt_demux_signals[SIGNAL_NEW_PAYLOAD_TYPE], 0, pt, srcpad);
}
- srcpad = rtpdemuxpad->pad;
-
if (pt != rtpdemux->last_pt) {
gint emit_pt = pt;
gst_rtp_pt_demux_signals[SIGNAL_PAYLOAD_TYPE_CHANGE], 0, emit_pt);
}
- if (rtpdemuxpad->newcaps) {
- GST_DEBUG ("need new caps");
+ while (need_caps_for_pt (rtpdemux, pt)) {
+ GST_DEBUG ("need new caps for %d", pt);
caps = gst_rtp_pt_demux_get_caps (rtpdemux, pt);
if (!caps)
goto no_caps;
+ clear_newcaps_for_pt (rtpdemux, pt);
+
caps = gst_caps_make_writable (caps);
gst_caps_set_simple (caps, "payload", G_TYPE_INT, pt, NULL);
gst_pad_set_caps (srcpad, caps);
gst_caps_unref (caps);
- rtpdemuxpad->newcaps = FALSE;
}
- gst_buffer_set_caps (buf, GST_PAD_CAPS (srcpad));
-
/* push to srcpad */
ret = gst_pad_push (srcpad, buf);
+ gst_object_unref (srcpad);
+
return ret;
/* ERRORS */
GST_ELEMENT_ERROR (rtpdemux, STREAM, DECODE, (NULL),
("Could not get caps for payload"));
gst_buffer_unref (buf);
+ if (srcpad)
+ gst_object_unref (srcpad);
return GST_FLOW_ERROR;
}
}
-static GstRtpPtDemuxPad *
+static GstPad *
find_pad_for_pt (GstRtpPtDemux * rtpdemux, guint8 pt)
{
- GstRtpPtDemuxPad *respad = NULL;
+ GstPad *respad = NULL;
GSList *walk;
+ GST_OBJECT_LOCK (rtpdemux);
for (walk = rtpdemux->srcpads; walk; walk = g_slist_next (walk)) {
GstRtpPtDemuxPad *pad = walk->data;
if (pad->pt == pt) {
- respad = pad;
+ respad = gst_object_ref (pad->pad);
break;
}
}
+ GST_OBJECT_UNLOCK (rtpdemux);
+
return respad;
}
static gboolean
-gst_rtp_pt_demux_sink_event (GstPad * pad, GstEvent * event)
+gst_rtp_pt_demux_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstRtpPtDemux *rtpdemux;
gboolean res = FALSE;
- rtpdemux = GST_RTP_PT_DEMUX (gst_pad_get_parent (pad));
- if (G_UNLIKELY (rtpdemux == NULL)) {
- gst_event_unref (event);
- return FALSE;
- }
+ rtpdemux = GST_RTP_PT_DEMUX (parent);
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ gst_rtp_pt_demux_clear_pt_map (rtpdemux);
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ }
case GST_EVENT_CUSTOM_DOWNSTREAM:
{
const GstStructure *s;
s = gst_event_get_structure (event);
if (gst_structure_has_name (s, "GstRTPPacketLost")) {
- GstRtpPtDemuxPad *rtpdemuxpad =
- find_pad_for_pt (rtpdemux, rtpdemux->last_pt);
+ GstPad *srcpad = find_pad_for_pt (rtpdemux, rtpdemux->last_pt);
- if (rtpdemuxpad)
- res = gst_pad_push_event (rtpdemuxpad->pad, event);
- else
+ if (srcpad) {
+ res = gst_pad_push_event (srcpad, event);
+ gst_object_unref (srcpad);
+ } else {
gst_event_unref (event);
+ }
} else {
- res = gst_pad_event_default (pad, event);
+ res = gst_pad_event_default (pad, parent, event);
}
break;
}
default:
- res = gst_pad_event_default (pad, event);
+ res = gst_pad_event_default (pad, parent, event);
break;
}
- gst_object_unref (rtpdemux);
return res;
}
static gboolean
-gst_rtp_pt_demux_src_event (GstPad * pad, GstEvent * event)
+gst_rtp_pt_demux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstRtpPtDemux *demux;
const GstStructure *s;
- demux = GST_RTP_PT_DEMUX (gst_pad_get_parent (pad));
+ demux = GST_RTP_PT_DEMUX (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_UPSTREAM:
if (s && !gst_structure_has_field (s, "payload")) {
GSList *walk;
+ GST_OBJECT_LOCK (demux);
for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
GstRtpPtDemuxPad *dpad = (GstRtpPtDemuxPad *) walk->data;
if (dpad->pad == pad) {
+ GstStructure *ws;
+
event =
GST_EVENT_CAST (gst_mini_object_make_writable
(GST_MINI_OBJECT_CAST (event)));
- gst_structure_set (event->structure,
- "payload", G_TYPE_UINT, dpad->pt, NULL);
+ ws = gst_event_writable_structure (event);
+ gst_structure_set (ws, "payload", G_TYPE_UINT, dpad->pt, NULL);
break;
}
}
+ GST_OBJECT_UNLOCK (demux);
}
break;
default:
break;
}
- gst_object_unref (demux);
-
- return gst_pad_event_default (pad, event);
+ return gst_pad_event_default (pad, parent, event);
}
-
-
/*
* Reserves resources for the object.
*/
static void
gst_rtp_pt_demux_release (GstRtpPtDemux * ptdemux)
{
+ GSList *tmppads;
GSList *walk;
- for (walk = ptdemux->srcpads; walk; walk = g_slist_next (walk)) {
+ GST_OBJECT_LOCK (ptdemux);
+ tmppads = ptdemux->srcpads;
+ ptdemux->srcpads = NULL;
+ GST_OBJECT_UNLOCK (ptdemux);
+
+ for (walk = tmppads; walk; walk = g_slist_next (walk)) {
GstRtpPtDemuxPad *pad = walk->data;
gst_pad_set_active (pad->pad, FALSE);
gst_element_remove_pad (GST_ELEMENT_CAST (ptdemux), pad->pad);
- g_free (pad);
+ g_slice_free (GstRtpPtDemuxPad, pad);
}
- g_slist_free (ptdemux->srcpads);
- ptdemux->srcpads = NULL;
+ g_slist_free (tmppads);
}
static GstStateChangeReturn
#define GST_RTP_SESSION_GET_PRIVATE(obj) \
(G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_RTP_SESSION, GstRtpSessionPrivate))
-#define GST_RTP_SESSION_LOCK(sess) g_mutex_lock ((sess)->priv->lock)
-#define GST_RTP_SESSION_UNLOCK(sess) g_mutex_unlock ((sess)->priv->lock)
+#define GST_RTP_SESSION_LOCK(sess) g_mutex_lock (&(sess)->priv->lock)
+#define GST_RTP_SESSION_UNLOCK(sess) g_mutex_unlock (&(sess)->priv->lock)
struct _GstRtpSessionPrivate
{
- GMutex *lock;
+ GMutex lock;
GstClock *sysclock;
RTPSession *session;
static GstStateChangeReturn gst_rtp_session_change_state (GstElement * element,
GstStateChange transition);
static GstPad *gst_rtp_session_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_rtp_session_release_pad (GstElement * element, GstPad * pad);
+static gboolean gst_rtp_session_sink_setcaps (GstPad * pad,
+ GstRtpSession * rtpsession, GstCaps * caps);
+static gboolean gst_rtp_session_setcaps_send_rtp (GstPad * pad,
+ GstRtpSession * rtpsession, GstCaps * caps);
+
static void gst_rtp_session_clear_pt_map (GstRtpSession * rtpsession);
static guint gst_rtp_session_signals[LAST_SIGNAL] = { 0 };
src->ssrc);
}
-GST_BOILERPLATE (GstRtpSession, gst_rtp_session, GstElement, GST_TYPE_ELEMENT);
-
-static void
-gst_rtp_session_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- /* sink pads */
- gst_element_class_add_static_pad_template (element_class,
- &rtpsession_recv_rtp_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &rtpsession_recv_rtcp_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &rtpsession_send_rtp_sink_template);
-
- /* src pads */
- gst_element_class_add_static_pad_template (element_class,
- &rtpsession_recv_rtp_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &rtpsession_sync_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &rtpsession_send_rtp_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &rtpsession_send_rtcp_src_template);
-
- gst_element_class_set_details_simple (element_class, "RTP Session",
- "Filter/Network/RTP",
- "Implement an RTP session", "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_session_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSession, gst_rtp_session, GST_TYPE_ELEMENT);
static void
gst_rtp_session_class_init (GstRtpSessionClass * klass)
klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_session_clear_pt_map);
+ /* sink pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_recv_rtp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_recv_rtcp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_send_rtp_sink_template));
+
+ /* src pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_recv_rtp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_sync_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_send_rtp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_send_rtcp_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP Session",
+ "Filter/Network/RTP",
+ "Implement an RTP session", "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (gst_rtp_session_debug,
"rtpsession", 0, "RTP Session");
}
static void
-gst_rtp_session_init (GstRtpSession * rtpsession, GstRtpSessionClass * klass)
+gst_rtp_session_init (GstRtpSession * rtpsession)
{
rtpsession->priv = GST_RTP_SESSION_GET_PRIVATE (rtpsession);
- rtpsession->priv->lock = g_mutex_new ();
+ g_mutex_init (&rtpsession->priv->lock);
rtpsession->priv->sysclock = gst_system_clock_obtain ();
rtpsession->priv->session = rtp_session_new ();
rtpsession->priv->use_pipeline_clock = DEFAULT_USE_PIPELINE_CLOCK;
rtpsession = GST_RTP_SESSION (object);
g_hash_table_destroy (rtpsession->priv->ptmap);
- g_mutex_free (rtpsession->priv->lock);
+ g_mutex_clear (&rtpsession->priv->lock);
g_object_unref (rtpsession->priv->sysclock);
g_object_unref (rtpsession->priv->session);
g_thread_join (rtpsession->priv->thread);
/* only create a new thread if the old one was stopped. Otherwise we can
* just reuse the currently running one. */
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- rtpsession->priv->thread =
- g_thread_create ((GThreadFunc) rtcp_thread, rtpsession, TRUE, &error);
-#else
rtpsession->priv->thread = g_thread_try_new ("rtpsession-rtcp-thread",
(GThreadFunc) rtcp_thread, rtpsession, &error);
-#endif
rtpsession->priv->thread_stopped = FALSE;
}
GST_RTP_SESSION_UNLOCK (rtpsession);
break;
}
- res = parent_class->change_state (element, transition);
+ res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
GstCaps *caps;
/* set rtcp caps on output pad */
- if (!(caps = GST_PAD_CAPS (rtcp_src))) {
- caps = gst_caps_new_simple ("application/x-rtcp", NULL);
+ if (!(caps = gst_pad_get_current_caps (rtcp_src))) {
+ caps = gst_caps_new_empty_simple ("application/x-rtcp");
gst_pad_set_caps (rtcp_src, caps);
- } else
- gst_caps_ref (caps);
- gst_buffer_set_caps (buffer, caps);
+ }
gst_caps_unref (caps);
gst_object_ref (rtcp_src);
GstCaps *caps;
/* set rtcp caps on output pad */
- if (!(caps = GST_PAD_CAPS (sync_src))) {
- caps = gst_caps_new_simple ("application/x-rtcp", NULL);
+ if (!(caps = gst_pad_get_current_caps (sync_src))) {
+ caps = gst_caps_new_empty_simple ("application/x-rtcp");
gst_pad_set_caps (sync_src, caps);
- } else
- gst_caps_ref (caps);
- gst_buffer_set_caps (buffer, caps);
+ }
gst_caps_unref (caps);
gst_object_ref (sync_src);
}
static gboolean
-gst_rtp_session_event_recv_rtp_sink (GstPad * pad, GstEvent * event)
+gst_rtp_session_event_recv_rtp_sink (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstRtpSession *rtpsession;
gboolean ret = FALSE;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
- if (G_UNLIKELY (rtpsession == NULL)) {
- gst_event_unref (event);
- return FALSE;
- }
+ rtpsession = GST_RTP_SESSION (parent);
GST_DEBUG_OBJECT (rtpsession, "received event %s",
GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ /* process */
+ gst_event_parse_caps (event, &caps);
+ gst_rtp_session_sink_setcaps (pad, rtpsession, caps);
+ ret = gst_pad_push_event (rtpsession->recv_rtp_src, event);
+ break;
+ }
case GST_EVENT_FLUSH_STOP:
gst_segment_init (&rtpsession->recv_rtp_seg, GST_FORMAT_UNDEFINED);
ret = gst_pad_push_event (rtpsession->recv_rtp_src, event);
break;
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- gboolean update;
- gdouble rate, arate;
- GstFormat format;
- gint64 start, stop, time;
- GstSegment *segment;
+ GstSegment *segment, in_segment;
segment = &rtpsession->recv_rtp_seg;
/* the newsegment event is needed to convert the RTP timestamp to
* running_time, which is needed to generate a mapping from RTP to NTP
* timestamps in SR reports */
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
-
- GST_DEBUG_OBJECT (rtpsession,
- "configured NEWSEGMENT update %d, rate %lf, applied rate %lf, "
- "format GST_FORMAT_TIME, "
- "%" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT
- ", time %" GST_TIME_FORMAT ", accum %" GST_TIME_FORMAT,
- update, rate, arate, GST_TIME_ARGS (segment->start),
- GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time),
- GST_TIME_ARGS (segment->accum));
+ gst_event_copy_segment (event, &in_segment);
+ GST_DEBUG_OBJECT (rtpsession, "received segment %" GST_SEGMENT_FORMAT,
+ &in_segment);
- gst_segment_set_newsegment_full (segment, update, rate,
- arate, format, start, stop, time);
+ /* accept upstream */
+ gst_segment_copy_into (&in_segment, segment);
/* push event forward */
ret = gst_pad_push_event (rtpsession->recv_rtp_src, event);
ret = gst_pad_push_event (rtpsession->recv_rtp_src, event);
break;
}
- gst_object_unref (rtpsession);
return ret;
}
static gboolean
-gst_rtp_session_event_recv_rtp_src (GstPad * pad, GstEvent * event)
+gst_rtp_session_event_recv_rtp_src (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstRtpSession *rtpsession;
gboolean forward = TRUE;
guint32 ssrc;
guint pt;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
- if (G_UNLIKELY (rtpsession == NULL)) {
- gst_event_unref (event);
- return FALSE;
- }
+ rtpsession = GST_RTP_SESSION (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_UPSTREAM:
if (forward)
ret = gst_pad_push_event (rtpsession->recv_rtp_sink, event);
- gst_object_unref (rtpsession);
-
return ret;
}
static GstIterator *
-gst_rtp_session_iterate_internal_links (GstPad * pad)
+gst_rtp_session_iterate_internal_links (GstPad * pad, GstObject * parent)
{
GstRtpSession *rtpsession;
GstPad *otherpad = NULL;
GstIterator *it = NULL;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
- if (G_UNLIKELY (rtpsession == NULL))
- return NULL;
+ rtpsession = GST_RTP_SESSION (parent);
GST_RTP_SESSION_LOCK (rtpsession);
if (pad == rtpsession->recv_rtp_src) {
GST_RTP_SESSION_UNLOCK (rtpsession);
if (otherpad) {
- it = gst_iterator_new_single (GST_TYPE_PAD, otherpad,
- (GstCopyFunction) gst_object_ref, (GFreeFunc) gst_object_unref);
+ GValue val = { 0, };
+
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_set_object (&val, otherpad);
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
gst_object_unref (otherpad);
}
- gst_object_unref (rtpsession);
-
return it;
}
static gboolean
-gst_rtp_session_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_rtp_session_sink_setcaps (GstPad * pad, GstRtpSession * rtpsession,
+ GstCaps * caps)
{
- GstRtpSession *rtpsession;
-
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
-
GST_RTP_SESSION_LOCK (rtpsession);
gst_rtp_session_cache_caps (rtpsession, caps);
GST_RTP_SESSION_UNLOCK (rtpsession);
- gst_object_unref (rtpsession);
-
return TRUE;
}
* forward the packet on the rtp_src pad
*/
static GstFlowReturn
-gst_rtp_session_chain_recv_rtp (GstPad * pad, GstBuffer * buffer)
+gst_rtp_session_chain_recv_rtp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
{
GstRtpSession *rtpsession;
GstRtpSessionPrivate *priv;
GstClockTime current_time, running_time;
GstClockTime timestamp;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
+ rtpsession = GST_RTP_SESSION (parent);
priv = rtpsession->priv;
GST_LOG_OBJECT (rtpsession, "received RTP packet");
goto push_error;
done:
- gst_object_unref (rtpsession);
return ret;
}
static gboolean
-gst_rtp_session_event_recv_rtcp_sink (GstPad * pad, GstEvent * event)
+gst_rtp_session_event_recv_rtcp_sink (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstRtpSession *rtpsession;
gboolean ret = FALSE;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
+ rtpsession = GST_RTP_SESSION (parent);
GST_DEBUG_OBJECT (rtpsession, "received event %s",
GST_EVENT_TYPE_NAME (event));
ret = gst_pad_push_event (rtpsession->sync_src, event);
break;
}
- gst_object_unref (rtpsession);
return ret;
}
* forward the SR packets to the sync_src pad.
*/
static GstFlowReturn
-gst_rtp_session_chain_recv_rtcp (GstPad * pad, GstBuffer * buffer)
+gst_rtp_session_chain_recv_rtcp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
{
GstRtpSession *rtpsession;
GstRtpSessionPrivate *priv;
GstClockTime current_time;
guint64 ntpnstime;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
+ rtpsession = GST_RTP_SESSION (parent);
priv = rtpsession->priv;
GST_LOG_OBJECT (rtpsession, "received RTCP packet");
rtp_session_process_rtcp (priv->session, buffer, current_time, ntpnstime);
- gst_object_unref (rtpsession);
-
return GST_FLOW_OK; /* always return OK */
}
static gboolean
-gst_rtp_session_query_send_rtcp_src (GstPad * pad, GstQuery * query)
+gst_rtp_session_query_send_rtcp_src (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
GstRtpSession *rtpsession;
gboolean ret = FALSE;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
+ rtpsession = GST_RTP_SESSION (parent);
GST_DEBUG_OBJECT (rtpsession, "received QUERY");
break;
}
- gst_object_unref (rtpsession);
-
return ret;
}
static gboolean
-gst_rtp_session_event_send_rtcp_src (GstPad * pad, GstEvent * event)
+gst_rtp_session_event_send_rtcp_src (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstRtpSession *rtpsession;
gboolean ret = TRUE;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
- if (G_UNLIKELY (rtpsession == NULL)) {
- gst_event_unref (event);
- return FALSE;
- }
+ rtpsession = GST_RTP_SESSION (parent);
GST_DEBUG_OBJECT (rtpsession, "received EVENT");
switch (GST_EVENT_TYPE (event)) {
break;
}
- gst_object_unref (rtpsession);
return ret;
}
static gboolean
-gst_rtp_session_event_send_rtp_sink (GstPad * pad, GstEvent * event)
+gst_rtp_session_event_send_rtp_sink (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstRtpSession *rtpsession;
gboolean ret = FALSE;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
+ rtpsession = GST_RTP_SESSION (parent);
GST_DEBUG_OBJECT (rtpsession, "received event");
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ /* process */
+ gst_event_parse_caps (event, &caps);
+ gst_rtp_session_setcaps_send_rtp (pad, rtpsession, caps);
+ ret = gst_pad_push_event (rtpsession->send_rtp_src, event);
+ break;
+ }
case GST_EVENT_FLUSH_STOP:
gst_segment_init (&rtpsession->send_rtp_seg, GST_FORMAT_UNDEFINED);
ret = gst_pad_push_event (rtpsession->send_rtp_src, event);
break;
- case GST_EVENT_NEWSEGMENT:{
- gboolean update;
- gdouble rate, arate;
- GstFormat format;
- gint64 start, stop, time;
- GstSegment *segment;
+ case GST_EVENT_SEGMENT:{
+ GstSegment *segment, in_segment;
segment = &rtpsession->send_rtp_seg;
/* the newsegment event is needed to convert the RTP timestamp to
* running_time, which is needed to generate a mapping from RTP to NTP
* timestamps in SR reports */
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
-
- GST_DEBUG_OBJECT (rtpsession,
- "configured NEWSEGMENT update %d, rate %lf, applied rate %lf, "
- "format GST_FORMAT_TIME, "
- "%" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT
- ", time %" GST_TIME_FORMAT ", accum %" GST_TIME_FORMAT,
- update, rate, arate, GST_TIME_ARGS (segment->start),
- GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time),
- GST_TIME_ARGS (segment->accum));
+ gst_event_copy_segment (event, &in_segment);
+ GST_DEBUG_OBJECT (rtpsession, "received segment %" GST_SEGMENT_FORMAT,
+ &in_segment);
- gst_segment_set_newsegment_full (segment, update, rate,
- arate, format, start, stop, time);
+ /* accept upstream */
+ gst_segment_copy_into (&in_segment, segment);
/* push event forward */
ret = gst_pad_push_event (rtpsession->send_rtp_src, event);
break;
}
}
- gst_object_unref (rtpsession);
return ret;
}
static GstCaps *
-gst_rtp_session_getcaps_send_rtp (GstPad * pad)
+gst_rtp_session_getcaps_send_rtp (GstPad * pad, GstRtpSession * rtpsession,
+ GstCaps * filter)
{
- GstRtpSession *rtpsession;
GstRtpSessionPrivate *priv;
GstCaps *result;
GstStructure *s1, *s2;
guint ssrc;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
priv = rtpsession->priv;
ssrc = rtp_session_get_internal_ssrc (priv->session);
* internal SSRC so that we don't have to patch it. Create a structure with
* the SSRC and another one without. */
s1 = gst_structure_new ("application/x-rtp", "ssrc", G_TYPE_UINT, ssrc, NULL);
- s2 = gst_structure_new ("application/x-rtp", NULL);
+ s2 = gst_structure_new_empty ("application/x-rtp");
result = gst_caps_new_full (s1, s2, NULL);
- GST_DEBUG_OBJECT (rtpsession, "getting caps %" GST_PTR_FORMAT, result);
+ if (filter) {
+ GstCaps *caps = result;
- gst_object_unref (rtpsession);
+ result = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ }
+
+ GST_DEBUG_OBJECT (rtpsession, "getting caps %" GST_PTR_FORMAT, result);
return result;
}
static gboolean
-gst_rtp_session_setcaps_send_rtp (GstPad * pad, GstCaps * caps)
+gst_rtp_session_query_send_rtp (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
+ gboolean res = FALSE;
GstRtpSession *rtpsession;
+
+ rtpsession = GST_RTP_SESSION (parent);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_rtp_session_getcaps_send_rtp (pad, rtpsession, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return res;
+}
+
+static gboolean
+gst_rtp_session_setcaps_send_rtp (GstPad * pad, GstRtpSession * rtpsession,
+ GstCaps * caps)
+{
GstRtpSessionPrivate *priv;
GstStructure *s = gst_caps_get_structure (caps, 0);
guint ssrc;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
priv = rtpsession->priv;
if (gst_structure_get_uint (s, "ssrc", &ssrc)) {
GST_DEBUG_OBJECT (rtpsession, "setting internal SSRC to %08x", ssrc);
rtp_session_set_internal_ssrc (priv->session, ssrc);
}
-
- gst_object_unref (rtpsession);
-
return TRUE;
}
* send to RTP session manager and forward to send_rtp_src.
*/
static GstFlowReturn
-gst_rtp_session_chain_send_rtp_common (GstPad * pad, gpointer data,
- gboolean is_list)
+gst_rtp_session_chain_send_rtp_common (GstRtpSession * rtpsession,
+ gpointer data, gboolean is_list)
{
- GstRtpSession *rtpsession;
GstRtpSessionPrivate *priv;
GstFlowReturn ret;
GstClockTime timestamp, running_time;
GstClockTime current_time;
- rtpsession = GST_RTP_SESSION (gst_pad_get_parent (pad));
priv = rtpsession->priv;
GST_LOG_OBJECT (rtpsession, "received RTP %s", is_list ? "list" : "packet");
/* All groups in an list have the same timestamp.
* So, just take it from the first group. */
- buffer = gst_buffer_list_get (GST_BUFFER_LIST_CAST (data), 0, 0);
+ buffer = gst_buffer_list_get (GST_BUFFER_LIST_CAST (data), 0);
if (buffer)
timestamp = GST_BUFFER_TIMESTAMP (buffer);
else
goto push_error;
done:
- gst_object_unref (rtpsession);
return ret;
}
static GstFlowReturn
-gst_rtp_session_chain_send_rtp (GstPad * pad, GstBuffer * buffer)
+gst_rtp_session_chain_send_rtp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
{
- return gst_rtp_session_chain_send_rtp_common (pad, buffer, FALSE);
+ GstRtpSession *rtpsession = GST_RTP_SESSION (parent);
+
+ return gst_rtp_session_chain_send_rtp_common (rtpsession, buffer, FALSE);
}
static GstFlowReturn
-gst_rtp_session_chain_send_rtp_list (GstPad * pad, GstBufferList * list)
+gst_rtp_session_chain_send_rtp_list (GstPad * pad, GstObject * parent,
+ GstBufferList * list)
{
- return gst_rtp_session_chain_send_rtp_common (pad, list, TRUE);
+ GstRtpSession *rtpsession = GST_RTP_SESSION (parent);
+
+ return gst_rtp_session_chain_send_rtp_common (rtpsession, list, TRUE);
}
/* Create sinkpad to receive RTP packets from senders. This will also create a
gst_rtp_session_chain_recv_rtp);
gst_pad_set_event_function (rtpsession->recv_rtp_sink,
(GstPadEventFunction) gst_rtp_session_event_recv_rtp_sink);
- gst_pad_set_setcaps_function (rtpsession->recv_rtp_sink,
- gst_rtp_session_sink_setcaps);
gst_pad_set_iterate_internal_links_function (rtpsession->recv_rtp_sink,
gst_rtp_session_iterate_internal_links);
gst_pad_set_active (rtpsession->recv_rtp_sink, TRUE);
gst_rtp_session_chain_send_rtp);
gst_pad_set_chain_list_function (rtpsession->send_rtp_sink,
gst_rtp_session_chain_send_rtp_list);
- gst_pad_set_getcaps_function (rtpsession->send_rtp_sink,
- gst_rtp_session_getcaps_send_rtp);
- gst_pad_set_setcaps_function (rtpsession->send_rtp_sink,
- gst_rtp_session_setcaps_send_rtp);
+ gst_pad_set_query_function (rtpsession->send_rtp_sink,
+ gst_rtp_session_query_send_rtp);
gst_pad_set_event_function (rtpsession->send_rtp_sink,
(GstPadEventFunction) gst_rtp_session_event_send_rtp_sink);
gst_pad_set_iterate_internal_links_function (rtpsession->send_rtp_sink,
static GstPad *
gst_rtp_session_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name)
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
{
GstRtpSession *rtpsession;
GstElementClass *klass;
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <string.h>
#include <gst/rtp/gstrtpbuffer.h>
#include <gst/rtp/gstrtcpbuffer.h>
);
static GstStaticPadTemplate rtp_ssrc_demux_src_template =
-GST_STATIC_PAD_TEMPLATE ("src_%d",
+GST_STATIC_PAD_TEMPLATE ("src_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("application/x-rtp")
);
static GstStaticPadTemplate rtp_ssrc_demux_rtcp_src_template =
-GST_STATIC_PAD_TEMPLATE ("rtcp_src_%d",
+GST_STATIC_PAD_TEMPLATE ("rtcp_src_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("application/x-rtcp")
);
-#define GST_PAD_LOCK(obj) (g_static_rec_mutex_lock (&(obj)->padlock))
-#define GST_PAD_UNLOCK(obj) (g_static_rec_mutex_unlock (&(obj)->padlock))
+#define GST_PAD_LOCK(obj) (g_rec_mutex_lock (&(obj)->padlock))
+#define GST_PAD_UNLOCK(obj) (g_rec_mutex_unlock (&(obj)->padlock))
/* signals */
enum
LAST_SIGNAL
};
-GST_BOILERPLATE (GstRtpSsrcDemux, gst_rtp_ssrc_demux, GstElement,
- GST_TYPE_ELEMENT);
-
+#define gst_rtp_ssrc_demux_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSsrcDemux, gst_rtp_ssrc_demux, GST_TYPE_ELEMENT);
/* GObject vmethods */
static void gst_rtp_ssrc_demux_dispose (GObject * object);
guint32 ssrc);
/* sinkpad stuff */
-static GstFlowReturn gst_rtp_ssrc_demux_chain (GstPad * pad, GstBuffer * buf);
-static gboolean gst_rtp_ssrc_demux_sink_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_rtp_ssrc_demux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_rtp_ssrc_demux_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static GstFlowReturn gst_rtp_ssrc_demux_rtcp_chain (GstPad * pad,
- GstBuffer * buf);
+ GstObject * parent, GstBuffer * buf);
static gboolean gst_rtp_ssrc_demux_rtcp_sink_event (GstPad * pad,
- GstEvent * event);
+ GstObject * parent, GstEvent * event);
static GstIterator *gst_rtp_ssrc_demux_iterate_internal_links_sink (GstPad *
- pad);
+ pad, GstObject * parent);
/* srcpad stuff */
-static gboolean gst_rtp_ssrc_demux_src_event (GstPad * pad, GstEvent * event);
-static GstIterator *gst_rtp_ssrc_demux_iterate_internal_links_src (GstPad *
- pad);
-static gboolean gst_rtp_ssrc_demux_src_query (GstPad * pad, GstQuery * query);
+static gboolean gst_rtp_ssrc_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstIterator *gst_rtp_ssrc_demux_iterate_internal_links_src (GstPad * pad,
+ GstObject * parent);
+static gboolean gst_rtp_ssrc_demux_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
static guint gst_rtp_ssrc_demux_signals[LAST_SIGNAL] = { 0 };
return NULL;
}
+static GstEvent *
+add_ssrc_and_ref (GstEvent * event, guint32 ssrc)
+{
+ /* Set the ssrc on the output caps */
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+ GstCaps *newcaps;
+ GstStructure *s;
+
+ gst_event_parse_caps (event, &caps);
+ newcaps = gst_caps_copy (caps);
+
+ s = gst_caps_get_structure (newcaps, 0);
+ gst_structure_set (s, "ssrc", G_TYPE_UINT, ssrc, NULL);
+ event = gst_event_new_caps (newcaps);
+ gst_caps_unref (newcaps);
+ break;
+ }
+ default:
+ gst_event_ref (event);
+ break;
+ }
+
+ return event;
+}
+
+struct ForwardEventData
+{
+ GstPad *pad;
+ guint32 ssrc;
+};
+
+static gboolean
+forward_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+{
+ struct ForwardEventData *data = user_data;
+ GstEvent *newevent;
+
+ newevent = add_ssrc_and_ref (*event, data->ssrc);
+
+ gst_pad_push_event (data->pad, newevent);
+
+ return TRUE;
+}
+
+
/* with PAD_LOCK */
static GstRtpSsrcDemuxPad *
find_or_create_demux_pad_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc)
GstPadTemplate *templ;
gchar *padname;
GstRtpSsrcDemuxPad *demuxpad;
+ GstCaps *caps;
+ struct ForwardEventData fdata;
GST_DEBUG_OBJECT (demux, "creating pad for SSRC %08x", ssrc);
}
klass = GST_ELEMENT_GET_CLASS (demux);
- templ = gst_element_class_get_pad_template (klass, "src_%d");
- padname = g_strdup_printf ("src_%d", ssrc);
+ templ = gst_element_class_get_pad_template (klass, "src_%u");
+ padname = g_strdup_printf ("src_%u", ssrc);
rtp_pad = gst_pad_new_from_template (templ, padname);
g_free (padname);
- templ = gst_element_class_get_pad_template (klass, "rtcp_src_%d");
- padname = g_strdup_printf ("rtcp_src_%d", ssrc);
+ templ = gst_element_class_get_pad_template (klass, "rtcp_src_%u");
+ padname = g_strdup_printf ("rtcp_src_%u", ssrc);
rtcp_pad = gst_pad_new_from_template (templ, padname);
g_free (padname);
demuxpad->rtp_pad = rtp_pad;
demuxpad->rtcp_pad = rtcp_pad;
+ fdata.ssrc = ssrc;
+
gst_pad_set_element_private (rtp_pad, demuxpad);
gst_pad_set_element_private (rtcp_pad, demuxpad);
demux->srcpads = g_slist_prepend (demux->srcpads, demuxpad);
- /* copy caps from input */
- gst_pad_set_caps (rtp_pad, GST_PAD_CAPS (demux->rtp_sink));
- gst_pad_use_fixed_caps (rtp_pad);
- gst_pad_set_caps (rtcp_pad, GST_PAD_CAPS (demux->rtcp_sink));
- gst_pad_use_fixed_caps (rtcp_pad);
-
- gst_pad_set_event_function (rtp_pad, gst_rtp_ssrc_demux_src_event);
gst_pad_set_query_function (rtp_pad, gst_rtp_ssrc_demux_src_query);
gst_pad_set_iterate_internal_links_function (rtp_pad,
gst_rtp_ssrc_demux_iterate_internal_links_src);
+ gst_pad_set_event_function (rtp_pad, gst_rtp_ssrc_demux_src_event);
+ gst_pad_use_fixed_caps (rtp_pad);
gst_pad_set_active (rtp_pad, TRUE);
+ fdata.pad = rtp_pad;
+ gst_pad_sticky_events_foreach (demux->rtp_sink, forward_sticky_events,
+ &fdata);
gst_pad_set_event_function (rtcp_pad, gst_rtp_ssrc_demux_src_event);
gst_pad_set_iterate_internal_links_function (rtcp_pad,
gst_rtp_ssrc_demux_iterate_internal_links_src);
+ gst_pad_use_fixed_caps (rtcp_pad);
gst_pad_set_active (rtcp_pad, TRUE);
+ fdata.pad = rtcp_pad;
+ gst_pad_sticky_events_foreach (demux->rtcp_sink, forward_sticky_events,
+ &fdata);
+ /* copy caps from input */
+ if ((caps = gst_pad_get_current_caps (demux->rtp_sink))) {
+ gst_pad_set_caps (rtp_pad, caps);
+ gst_caps_unref (caps);
+ }
+ if ((caps = gst_pad_get_current_caps (demux->rtcp_sink))) {
+ gst_pad_set_caps (rtcp_pad, caps);
+ gst_caps_unref (caps);
+ }
gst_element_add_pad (GST_ELEMENT_CAST (demux), rtp_pad);
gst_element_add_pad (GST_ELEMENT_CAST (demux), rtcp_pad);
}
static void
-gst_rtp_ssrc_demux_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_klass = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (gstelement_klass,
- &rtp_ssrc_demux_sink_template);
- gst_element_class_add_static_pad_template (gstelement_klass,
- &rtp_ssrc_demux_rtcp_sink_template);
- gst_element_class_add_static_pad_template (gstelement_klass,
- &rtp_ssrc_demux_src_template);
- gst_element_class_add_static_pad_template (gstelement_klass,
- &rtp_ssrc_demux_rtcp_src_template);
-
- gst_element_class_set_details_simple (gstelement_klass, "RTP SSRC Demux",
- "Demux/Network/RTP",
- "Splits RTP streams based on the SSRC",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_ssrc_demux_class_init (GstRtpSsrcDemuxClass * klass)
{
GObjectClass *gobject_klass;
gstrtpssrcdemux_klass->clear_ssrc =
GST_DEBUG_FUNCPTR (gst_rtp_ssrc_demux_clear_ssrc);
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_ssrc_demux_sink_template));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_ssrc_demux_rtcp_sink_template));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_ssrc_demux_src_template));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_ssrc_demux_rtcp_src_template));
+
+ gst_element_class_set_details_simple (gstelement_klass, "RTP SSRC Demux",
+ "Demux/Network/RTP",
+ "Splits RTP streams based on the SSRC",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (gst_rtp_ssrc_demux_debug,
"rtpssrcdemux", 0, "RTP SSRC demuxer");
}
static void
-gst_rtp_ssrc_demux_init (GstRtpSsrcDemux * demux,
- GstRtpSsrcDemuxClass * g_class)
+gst_rtp_ssrc_demux_init (GstRtpSsrcDemux * demux)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (demux);
gst_rtp_ssrc_demux_iterate_internal_links_sink);
gst_element_add_pad (GST_ELEMENT_CAST (demux), demux->rtcp_sink);
- g_static_rec_mutex_init (&demux->padlock);
+ g_rec_mutex_init (&demux->padlock);
gst_segment_init (&demux->segment, GST_FORMAT_UNDEFINED);
}
GstRtpSsrcDemux *demux;
demux = GST_RTP_SSRC_DEMUX (object);
- g_static_rec_mutex_free (&demux->padlock);
+ g_rec_mutex_clear (&demux->padlock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
}
static gboolean
-gst_rtp_ssrc_demux_sink_event (GstPad * pad, GstEvent * event)
+gst_rtp_ssrc_demux_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstRtpSsrcDemux *demux;
gboolean res = FALSE;
- demux = GST_RTP_SSRC_DEMUX (gst_pad_get_parent (pad));
- if (G_UNLIKELY (demux == NULL)) {
- gst_event_unref (event);
- return FALSE;
- }
+ demux = GST_RTP_SSRC_DEMUX (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
gst_segment_init (&demux->segment, GST_FORMAT_UNDEFINED);
- case GST_EVENT_NEWSEGMENT:
+ /* fallthrough */
default:
{
GSList *walk;
for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
GstRtpSsrcDemuxPad *pad = (GstRtpSsrcDemuxPad *) walk->data;
- pads = g_slist_prepend (pads, gst_object_ref (pad->rtp_pad));
+ pad = g_slice_dup (GstRtpSsrcDemuxPad, pad);
+ gst_object_ref (pad->rtp_pad);
+
+ pads = g_slist_prepend (pads, pad);
}
GST_PAD_UNLOCK (demux);
+
for (walk = pads; walk; walk = g_slist_next (walk)) {
- GstPad *pad = (GstPad *) walk->data;
+ GstRtpSsrcDemuxPad *dpad = walk->data;
+ GstEvent *newevent;
- gst_event_ref (event);
- res &= gst_pad_push_event (pad, event);
- gst_object_unref (pad);
+ newevent = add_ssrc_and_ref (event, dpad->ssrc);
+
+ res &= gst_pad_push_event (dpad->rtp_pad, newevent);
+ gst_object_unref (dpad->rtp_pad);
+ g_slice_free (GstRtpSsrcDemuxPad, dpad);
}
g_slist_free (pads);
gst_event_unref (event);
}
}
- gst_object_unref (demux);
return res;
}
static gboolean
-gst_rtp_ssrc_demux_rtcp_sink_event (GstPad * pad, GstEvent * event)
+gst_rtp_ssrc_demux_rtcp_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstRtpSsrcDemux *demux;
- gboolean res = FALSE;
+ gboolean res = TRUE;
+ GSList *walk;
+ GSList *pads = NULL;
- demux = GST_RTP_SSRC_DEMUX (gst_pad_get_parent (pad));
+ demux = GST_RTP_SSRC_DEMUX (parent);
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
- default:
- {
- GSList *walk;
- GSList *pads = NULL;
+ GST_PAD_LOCK (demux);
+ for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
+ GstRtpSsrcDemuxPad *pad = (GstRtpSsrcDemuxPad *) walk->data;
- res = TRUE;
- GST_PAD_LOCK (demux);
- for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
- GstRtpSsrcDemuxPad *pad = (GstRtpSsrcDemuxPad *) walk->data;
+ pad = g_slice_dup (GstRtpSsrcDemuxPad, pad);
+ gst_object_ref (pad->rtcp_pad);
- pads = g_slist_prepend (pads, gst_object_ref (pad->rtcp_pad));
- }
- GST_PAD_UNLOCK (demux);
- for (walk = pads; walk; walk = g_slist_next (walk)) {
- GstPad *pad = (GstPad *) walk->data;
+ pads = g_slist_prepend (pads, pad);
+ }
+ GST_PAD_UNLOCK (demux);
- gst_event_ref (event);
- res &= gst_pad_push_event (pad, event);
- gst_object_unref (pad);
- }
- g_slist_free (pads);
- gst_event_unref (event);
- break;
- }
+ for (walk = pads; walk; walk = g_slist_next (walk)) {
+ GstRtpSsrcDemuxPad *dpad = walk->data;
+ GstEvent *newevent;
+
+ newevent = add_ssrc_and_ref (event, dpad->ssrc);
+
+ res &= gst_pad_push_event (dpad->rtcp_pad, newevent);
+ gst_object_unref (dpad->rtcp_pad);
+ g_slice_free (GstRtpSsrcDemuxPad, dpad);
}
- gst_object_unref (demux);
+ g_slist_free (pads);
+ gst_event_unref (event);
+
return res;
}
static GstFlowReturn
-gst_rtp_ssrc_demux_chain (GstPad * pad, GstBuffer * buf)
+gst_rtp_ssrc_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstFlowReturn ret;
GstRtpSsrcDemux *demux;
guint32 ssrc;
GstRtpSsrcDemuxPad *dpad;
+ GstRTPBuffer rtp = { NULL };
GstPad *srcpad;
- demux = GST_RTP_SSRC_DEMUX (GST_OBJECT_PARENT (pad));
+ demux = GST_RTP_SSRC_DEMUX (parent);
if (!gst_rtp_buffer_validate (buf))
goto invalid_payload;
- ssrc = gst_rtp_buffer_get_ssrc (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
GST_DEBUG_OBJECT (demux, "received buffer of SSRC %08x", ssrc);
}
static GstFlowReturn
-gst_rtp_ssrc_demux_rtcp_chain (GstPad * pad, GstBuffer * buf)
+gst_rtp_ssrc_demux_rtcp_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf)
{
GstFlowReturn ret;
GstRtpSsrcDemux *demux;
guint32 ssrc;
GstRtpSsrcDemuxPad *dpad;
GstRTCPPacket packet;
+ GstRTCPBuffer rtcp = { NULL, };
GstPad *srcpad;
- demux = GST_RTP_SSRC_DEMUX (GST_OBJECT_PARENT (pad));
+ demux = GST_RTP_SSRC_DEMUX (parent);
if (!gst_rtcp_buffer_validate (buf))
goto invalid_rtcp;
- if (!gst_rtcp_buffer_get_first_packet (buf, &packet))
+ gst_rtcp_buffer_map (buf, GST_MAP_READ, &rtcp);
+ if (!gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) {
+ gst_rtcp_buffer_unmap (&rtcp);
goto invalid_rtcp;
+ }
/* first packet must be SR or RR or else the validate would have failed */
switch (gst_rtcp_packet_get_type (&packet)) {
default:
goto unexpected_rtcp;
}
+ gst_rtcp_buffer_unmap (&rtcp);
GST_DEBUG_OBJECT (demux, "received RTCP of SSRC %08x", ssrc);
}
}
+static GstRtpSsrcDemuxPad *
+find_demux_pad_for_pad (GstRtpSsrcDemux * demux, GstPad * pad)
+{
+ GSList *walk;
+
+ for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
+ GstRtpSsrcDemuxPad *dpad = (GstRtpSsrcDemuxPad *) walk->data;
+ if (dpad->rtp_pad == pad || dpad->rtcp_pad == pad) {
+ return dpad;
+ }
+ }
+
+ return NULL;
+}
+
+
static gboolean
-gst_rtp_ssrc_demux_src_event (GstPad * pad, GstEvent * event)
+gst_rtp_ssrc_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstRtpSsrcDemux *demux;
const GstStructure *s;
- demux = GST_RTP_SSRC_DEMUX (gst_pad_get_parent (pad));
+ demux = GST_RTP_SSRC_DEMUX (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_UPSTREAM:
case GST_EVENT_CUSTOM_BOTH_OOB:
s = gst_event_get_structure (event);
if (s && !gst_structure_has_field (s, "ssrc")) {
- GSList *walk;
-
- for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
- GstRtpSsrcDemuxPad *dpad = (GstRtpSsrcDemuxPad *) walk->data;
-
- if (dpad->rtp_pad == pad || dpad->rtcp_pad == pad) {
- event =
- GST_EVENT_CAST (gst_mini_object_make_writable
- (GST_MINI_OBJECT_CAST (event)));
- gst_structure_set (event->structure, "ssrc", G_TYPE_UINT,
- dpad->ssrc, NULL);
- break;
- }
+ GstRtpSsrcDemuxPad *dpad = find_demux_pad_for_pad (demux, pad);
+
+ if (dpad) {
+ GstStructure *ws;
+
+ event = gst_event_make_writable (event);
+ ws = gst_event_writable_structure (event);
+ gst_structure_set (ws, "ssrc", G_TYPE_UINT, dpad->ssrc, NULL);
}
}
break;
break;
}
- gst_object_unref (demux);
-
- return gst_pad_event_default (pad, event);
+ return gst_pad_event_default (pad, parent, event);
}
static GstIterator *
-gst_rtp_ssrc_demux_iterate_internal_links_src (GstPad * pad)
+gst_rtp_ssrc_demux_iterate_internal_links_src (GstPad * pad, GstObject * parent)
{
GstRtpSsrcDemux *demux;
GstPad *otherpad = NULL;
GstIterator *it = NULL;
GSList *current;
- demux = GST_RTP_SSRC_DEMUX (gst_pad_get_parent (pad));
-
- if (!demux)
- return NULL;
+ demux = GST_RTP_SSRC_DEMUX (parent);
GST_PAD_LOCK (demux);
for (current = demux->srcpads; current; current = g_slist_next (current)) {
break;
}
}
- it = gst_iterator_new_single (GST_TYPE_PAD, otherpad,
- (GstCopyFunction) gst_object_ref, (GFreeFunc) gst_object_unref);
+ if (otherpad) {
+ GValue val = { 0, };
+
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_set_object (&val, otherpad);
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
+
+ }
GST_PAD_UNLOCK (demux);
- gst_object_unref (demux);
return it;
}
static gint
src_pad_compare_func (gconstpointer a, gconstpointer b)
{
- GstPad *pad = GST_PAD (a);
- const gchar *prefix = b;
+ GstPad *pad = GST_PAD (g_value_get_object (a));
+ const gchar *prefix = g_value_get_string (b);
gint res = 1;
GST_OBJECT_LOCK (pad);
}
static GstIterator *
-gst_rtp_ssrc_demux_iterate_internal_links_sink (GstPad * pad)
+gst_rtp_ssrc_demux_iterate_internal_links_sink (GstPad * pad,
+ GstObject * parent)
{
GstRtpSsrcDemux *demux;
GstIterator *it = NULL;
- const gchar *prefix = NULL;
-
- demux = GST_RTP_SSRC_DEMUX (gst_pad_get_parent (pad));
+ GValue gval = { 0, };
- if (!demux)
- return NULL;
+ demux = GST_RTP_SSRC_DEMUX (parent);
+ g_value_init (&gval, G_TYPE_STRING);
if (pad == demux->rtp_sink)
- prefix = "src_";
+ g_value_set_static_string (&gval, "src_");
else if (pad == demux->rtcp_sink)
- prefix = "rtcp_src_";
+ g_value_set_static_string (&gval, "rtcp_src_");
else
g_assert_not_reached ();
- it = gst_element_iterate_src_pads (GST_ELEMENT (demux));
-
- it = gst_iterator_filter (it, src_pad_compare_func, (gpointer) prefix);
+ it = gst_element_iterate_src_pads (GST_ELEMENT_CAST (demux));
+ it = gst_iterator_filter (it, src_pad_compare_func, &gval);
- gst_object_unref (demux);
return it;
}
static gboolean
-gst_rtp_ssrc_demux_src_query (GstPad * pad, GstQuery * query)
+gst_rtp_ssrc_demux_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
GstRtpSsrcDemux *demux;
gboolean res = FALSE;
- demux = GST_RTP_SSRC_DEMUX (gst_pad_get_parent (pad));
- if (G_UNLIKELY (demux == NULL))
- return FALSE;
+ demux = GST_RTP_SSRC_DEMUX (parent);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_LATENCY:
break;
}
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
- gst_object_unref (demux);
return res;
}
GstPad *rtp_sink;
GstPad *rtcp_sink;
- GStaticRecMutex padlock;
+ GRecMutex padlock;
GSList *srcpads;
};
GList *list;
guint32 rtptime;
guint16 seqnum;
+ GstRTPBuffer rtp = {NULL};
g_return_val_if_fail (jbuf != NULL, FALSE);
g_return_val_if_fail (buf != NULL, FALSE);
- seqnum = gst_rtp_buffer_get_seq (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ seqnum = gst_rtp_buffer_get_seq (&rtp);
/* loop the list to skip strictly smaller seqnum buffers */
for (list = jbuf->packets->head; list; list = g_list_next (list)) {
guint16 qseq;
gint gap;
+ GstRTPBuffer rtpb = {NULL};
- qseq = gst_rtp_buffer_get_seq (GST_BUFFER_CAST (list->data));
+ gst_rtp_buffer_map (GST_BUFFER_CAST (list->data), GST_MAP_READ, &rtpb);
+ qseq = gst_rtp_buffer_get_seq (&rtpb);
+ gst_rtp_buffer_unmap (&rtpb);
/* compare the new seqnum to the one in the buffer */
gap = gst_rtp_buffer_compare_seqnum (seqnum, qseq);
break;
}
- rtptime = gst_rtp_buffer_get_timestamp (buf);
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
/* rtp time jumps are checked for during skew calculation, but bypassed
* in other mode, so mind those here and reset jb if needed.
* Only reset if valid input time, which is likely for UDP input
if (G_LIKELY (tail))
*tail = (list == NULL);
+ gst_rtp_buffer_unmap (&rtp);
+
return TRUE;
/* ERRORS */
duplicate:
{
+ gst_rtp_buffer_unmap (&rtp);
GST_WARNING ("duplicate packet %d found", (gint) seqnum);
return FALSE;
}
guint64 high_ts, low_ts;
GstBuffer *high_buf, *low_buf;
guint32 result;
+ GstRTPBuffer rtp = {NULL};
g_return_val_if_fail (jbuf != NULL, 0);
if (!high_buf || !low_buf || high_buf == low_buf)
return 0;
- high_ts = gst_rtp_buffer_get_timestamp (high_buf);
- low_ts = gst_rtp_buffer_get_timestamp (low_buf);
+ gst_rtp_buffer_map (high_buf, GST_MAP_READ, &rtp);
+ high_ts = gst_rtp_buffer_get_timestamp (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+ gst_rtp_buffer_map (low_buf, GST_MAP_READ, &rtp);
+ low_ts = gst_rtp_buffer_get_timestamp (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
/* it needs to work if ts wraps */
if (high_ts >= low_ts) {
#include <gst/rtp/gstrtpbuffer.h>
#include <gst/rtp/gstrtcpbuffer.h>
-#include <gst/netbuffer/gstnetbuffer.h>
#include <gst/glib-compat-private.h>
}
static void
-gst_rtp_bin_marshal_BOOLEAN__MINIOBJECT_BOOLEAN (GClosure * closure,
- GValue * return_value G_GNUC_UNUSED, guint n_param_values,
- const GValue * param_values, gpointer invocation_hint G_GNUC_UNUSED,
- gpointer marshal_data)
-{
- typedef gboolean (*GMarshalFunc_BOOLEAN__MINIOBJECT_BOOLEAN) (gpointer data1,
- gpointer arg_1, gboolean arg_2, gpointer data2);
- register GMarshalFunc_BOOLEAN__MINIOBJECT_BOOLEAN callback;
- register GCClosure *cc = (GCClosure *) closure;
- register gpointer data1, data2;
- gboolean v_return;
-
- g_return_if_fail (return_value != NULL);
- g_return_if_fail (n_param_values == 3);
-
- if (G_CCLOSURE_SWAP_DATA (closure)) {
- data1 = closure->data;
- data2 = g_value_peek_pointer (param_values + 0);
- } else {
- data1 = g_value_peek_pointer (param_values + 0);
- data2 = closure->data;
- }
- callback =
- (GMarshalFunc_BOOLEAN__MINIOBJECT_BOOLEAN) (marshal_data ? marshal_data :
- cc->callback);
-
- v_return = callback (data1,
- gst_value_get_mini_object (param_values + 1),
- g_value_get_boolean (param_values + 2), data2);
-
- g_value_set_boolean (return_value, v_return);
-}
-
-static void
-gst_rtp_bin_marshal_VOID__UINT_UINT_UINT_UINT_MINIOBJECT (GClosure * closure,
- GValue * return_value G_GNUC_UNUSED, guint n_param_values,
- const GValue * param_values, gpointer invocation_hint G_GNUC_UNUSED,
- gpointer marshal_data)
-{
- typedef void (*GMarshalFunc_VOID__UINT_UINT_UINT_UINT_MINIOBJECT) (gpointer
- data1, guint arg_1, guint arg_2, guint arg_3, guint arg_4, gpointer arg_5,
- gpointer data2);
- register GMarshalFunc_VOID__UINT_UINT_UINT_UINT_MINIOBJECT callback;
- register GCClosure *cc = (GCClosure *) closure;
- register gpointer data1, data2;
-
- g_return_if_fail (n_param_values == 6);
-
- if (G_CCLOSURE_SWAP_DATA (closure)) {
- data1 = closure->data;
- data2 = g_value_peek_pointer (param_values + 0);
- } else {
- data1 = g_value_peek_pointer (param_values + 0);
- data2 = closure->data;
- }
- callback =
- (GMarshalFunc_VOID__UINT_UINT_UINT_UINT_MINIOBJECT) (marshal_data ?
- marshal_data : cc->callback);
-
- callback (data1,
- g_value_get_uint (param_values + 1),
- g_value_get_uint (param_values + 2),
- g_value_get_uint (param_values + 3),
- g_value_get_uint (param_values + 4),
- gst_value_get_mini_object (param_values + 5), data2);
-}
-
-
-static void
rtp_session_class_init (RTPSessionClass * klass)
{
GObjectClass *gobject_class;
rtp_session_signals[SIGNAL_ON_SENDING_RTCP] =
g_signal_new ("on-sending-rtcp", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_sending_rtcp),
- accumulate_trues, NULL, gst_rtp_bin_marshal_BOOLEAN__MINIOBJECT_BOOLEAN,
- G_TYPE_BOOLEAN, 2, GST_TYPE_BUFFER, G_TYPE_BOOLEAN);
+ accumulate_trues, NULL, gst_rtp_bin_marshal_BOOLEAN__BOXED_BOOLEAN,
+ G_TYPE_BOOLEAN, 2, GST_TYPE_BUFFER | G_SIGNAL_TYPE_STATIC_SCOPE,
+ G_TYPE_BOOLEAN);
/**
* RTPSession::on-feedback-rtcp:
*
* Notify that a RTCP feedback packet has been received
*/
-
rtp_session_signals[SIGNAL_ON_FEEDBACK_RTCP] =
g_signal_new ("on-feedback-rtcp", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_feedback_rtcp),
- NULL, NULL, gst_rtp_bin_marshal_VOID__UINT_UINT_UINT_UINT_MINIOBJECT,
+ NULL, NULL, gst_rtp_bin_marshal_VOID__UINT_UINT_UINT_UINT_BOXED,
G_TYPE_NONE, 5, G_TYPE_UINT, G_TYPE_UINT, G_TYPE_UINT, G_TYPE_UINT,
GST_TYPE_BUFFER);
gint i;
gchar *str;
- sess->lock = g_mutex_new ();
+ g_mutex_init (&sess->lock);
sess->key = g_random_int ();
sess->mask_idx = 0;
sess->mask = 0;
sess = RTP_SESSION_CAST (object);
- g_mutex_free (sess->lock);
+ g_mutex_clear (&sess->lock);
for (i = 0; i < 32; i++)
g_hash_table_destroy (sess->ssrcs[i]);
RTPArrivalStats * arrival, gboolean rtp)
{
/* If we have no arrival address, we can't do collision checking */
- if (!arrival->have_address)
+ if (!arrival->address)
return FALSE;
if (sess->source != source) {
- GstNetAddress *from;
- gboolean have_from;
+ GSocketAddress *from;
/* This is not our local source, but lets check if two remote
* source collide
*/
if (rtp) {
- from = &source->rtp_from;
- have_from = source->have_rtp_from;
+ from = source->rtp_from;
} else {
- from = &source->rtcp_from;
- have_from = source->have_rtcp_from;
+ from = source->rtcp_from;
}
- if (have_from) {
- if (gst_netaddress_equal (from, &arrival->address)) {
+ if (from) {
+ if (__g_socket_address_equal (from, arrival->address)) {
/* Address is the same */
return FALSE;
} else {
rtp_source_get_ssrc (source));
if (sess->favor_new) {
if (rtp_source_find_conflicting_address (source,
- &arrival->address, arrival->current_time)) {
- gchar buf1[40];
- gst_netaddress_to_string (&arrival->address, buf1, 40);
+ arrival->address, arrival->current_time)) {
+ gchar *buf1;
+
+ buf1 = __g_socket_address_to_string (arrival->address);
GST_LOG ("Known conflict on %x for %s, dropping packet",
rtp_source_get_ssrc (source), buf1);
+ g_free (buf1);
+
return TRUE;
} else {
- gchar buf1[40], buf2[40];
+ gchar *buf1, *buf2;
/* Current address is not a known conflict, lets assume this is
* a new source. Save old address in possible conflict list
rtp_source_add_conflicting_address (source, from,
arrival->current_time);
- gst_netaddress_to_string (from, buf1, 40);
- gst_netaddress_to_string (&arrival->address, buf2, 40);
+ buf1 = __g_socket_address_to_string (from);
+ buf2 = __g_socket_address_to_string (arrival->address);
+
GST_DEBUG ("New conflict for ssrc %x, replacing %s with %s,"
" saving old as known conflict",
rtp_source_get_ssrc (source), buf1, buf2);
if (rtp)
- rtp_source_set_rtp_from (source, &arrival->address);
+ rtp_source_set_rtp_from (source, arrival->address);
else
- rtp_source_set_rtcp_from (source, &arrival->address);
+ rtp_source_set_rtcp_from (source, arrival->address);
+
+ g_free (buf1);
+ g_free (buf2);
+
return FALSE;
}
} else {
} else {
/* We don't already have a from address for RTP, just set it */
if (rtp)
- rtp_source_set_rtp_from (source, &arrival->address);
+ rtp_source_set_rtp_from (source, arrival->address);
else
- rtp_source_set_rtcp_from (source, &arrival->address);
+ rtp_source_set_rtcp_from (source, arrival->address);
return FALSE;
}
if (inactivity_period > 1 * GST_SECOND) {
/* Use new network address */
if (rtp) {
- g_assert (source->have_rtp_from);
- rtp_source_set_rtp_from (source, &arrival->address);
+ g_assert (source->rtp_from);
+ rtp_source_set_rtp_from (source, arrival->address);
} else {
- g_assert (source->have_rtcp_from);
- rtp_source_set_rtcp_from (source, &arrival->address);
+ g_assert (source->rtcp_from);
+ rtp_source_set_rtcp_from (source, arrival->address);
}
return FALSE;
}
} else {
/* This is sending with our ssrc, is it an address we already know */
- if (rtp_source_find_conflicting_address (source, &arrival->address,
+ if (rtp_source_find_conflicting_address (source, arrival->address,
arrival->current_time)) {
/* Its a known conflict, its probably a loop, not a collision
* lets just drop the incoming packet
} else {
/* Its a new collision, lets change our SSRC */
- rtp_source_add_conflicting_address (source, &arrival->address,
+ rtp_source_add_conflicting_address (source, arrival->address,
arrival->current_time);
GST_DEBUG ("Collision for SSRC %x", rtp_source_get_ssrc (source));
source->probation = 0;
/* store from address, if any */
- if (arrival->have_address) {
+ if (arrival->address) {
if (rtp)
- rtp_source_set_rtp_from (source, &arrival->address);
+ rtp_source_set_rtp_from (source, arrival->address);
else
- rtp_source_set_rtcp_from (source, &arrival->address);
+ rtp_source_set_rtcp_from (source, arrival->address);
}
/* configure a callback on the source */
gboolean rtp, GstBuffer * buffer, GstClockTime current_time,
GstClockTime running_time, guint64 ntpnstime)
{
+ GstNetAddressMeta *meta;
+ GstRTPBuffer rtpb = { NULL };
+
/* get time of arrival */
arrival->current_time = current_time;
arrival->running_time = running_time;
arrival->ntpnstime = ntpnstime;
/* get packet size including header overhead */
- arrival->bytes = GST_BUFFER_SIZE (buffer) + sess->header_len;
+ arrival->bytes = gst_buffer_get_size (buffer) + sess->header_len;
if (rtp) {
- arrival->payload_len = gst_rtp_buffer_get_payload_len (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtpb);
+ arrival->payload_len = gst_rtp_buffer_get_payload_len (&rtpb);
+ gst_rtp_buffer_unmap (&rtpb);
} else {
arrival->payload_len = 0;
}
/* for netbuffer we can store the IP address to check for collisions */
- arrival->have_address = GST_IS_NETBUFFER (buffer);
- if (arrival->have_address) {
- GstNetBuffer *netbuf = (GstNetBuffer *) buffer;
-
- memcpy (&arrival->address, &netbuf->from, sizeof (GstNetAddress));
+ meta = gst_buffer_get_net_address_meta (buffer);
+ if (arrival->address)
+ g_object_unref (arrival->address);
+ if (meta) {
+ arrival->address = G_SOCKET_ADDRESS (g_object_ref (meta->addr));
+ } else {
+ arrival->address = NULL;
}
}
RTPSource *source;
gboolean created;
gboolean prevsender, prevactive;
- RTPArrivalStats arrival;
+ RTPArrivalStats arrival = { NULL, };
guint32 csrcs[16];
guint8 i, count;
guint64 oldrate;
+ GstRTPBuffer rtp = { NULL };
g_return_val_if_fail (RTP_IS_SESSION (sess), GST_FLOW_ERROR);
g_return_val_if_fail (GST_IS_BUFFER (buffer), GST_FLOW_ERROR);
goto ignore;
/* get SSRC and look up in session database */
- ssrc = gst_rtp_buffer_get_ssrc (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ ssrc = gst_rtp_buffer_get_ssrc (&rtp);
source = obtain_source (sess, ssrc, &created, &arrival, TRUE);
- if (!source)
+ if (!source) {
+ gst_rtp_buffer_unmap (&rtp);
goto collision;
-
- prevsender = RTP_SOURCE_IS_SENDER (source);
- prevactive = RTP_SOURCE_IS_ACTIVE (source);
- oldrate = source->bitrate;
+ }
/* copy available csrc for later */
- count = gst_rtp_buffer_get_csrc_count (buffer);
+ count = gst_rtp_buffer_get_csrc_count (&rtp);
/* make sure to not overflow our array. An RTP buffer can maximally contain
* 16 CSRCs */
count = MIN (count, 16);
for (i = 0; i < count; i++)
- csrcs[i] = gst_rtp_buffer_get_csrc (buffer, i);
+ csrcs[i] = gst_rtp_buffer_get_csrc (&rtp, i);
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ prevsender = RTP_SOURCE_IS_SENDER (source);
+ prevactive = RTP_SOURCE_IS_ACTIVE (source);
+ oldrate = source->bitrate;
/* let source process the packet */
result = rtp_source_process_rtp (source, buffer, &arrival);
if (!source)
return;
- sdes = gst_structure_new ("application/x-rtp-source-sdes", NULL);
+ sdes = gst_structure_new_empty ("application/x-rtp-source-sdes");
more_entries = gst_rtcp_packet_sdes_first_entry (packet);
j = 0;
GstBuffer *fci_buffer = NULL;
if (fci_length > 0) {
- fci_buffer = gst_buffer_create_sub (packet->buffer,
- fci_data - GST_BUFFER_DATA (packet->buffer), fci_length);
+ fci_buffer = gst_buffer_copy_region (packet->rtcp->buffer,
+ GST_BUFFER_COPY_MEMORY, fci_data - packet->rtcp->map.data,
+ fci_length);
GST_BUFFER_TIMESTAMP (fci_buffer) = arrival->running_time;
}
{
GstRTCPPacket packet;
gboolean more, is_bye = FALSE, do_sync = FALSE;
- RTPArrivalStats arrival;
+ RTPArrivalStats arrival = { NULL, };
GstFlowReturn result = GST_FLOW_OK;
+ GstRTCPBuffer rtcp = { NULL, };
g_return_val_if_fail (RTP_IS_SESSION (sess), GST_FLOW_ERROR);
g_return_val_if_fail (GST_IS_BUFFER (buffer), GST_FLOW_ERROR);
goto ignore;
/* start processing the compound packet */
- more = gst_rtcp_buffer_get_first_packet (buffer, &packet);
+ gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp);
+ more = gst_rtcp_buffer_get_first_packet (&rtcp, &packet);
while (more) {
GstRTCPType type;
more = gst_rtcp_packet_move_to_next (&packet);
}
+ gst_rtcp_buffer_unmap (&rtcp);
+
/* if we are scheduling a BYE, we only want to count bye packets, else we
* count everything */
if (sess->source->received_bye) {
sess->stats.avg_rtcp_packet_size, arrival.bytes);
RTP_SESSION_UNLOCK (sess);
+ if (arrival.address)
+ g_object_unref (arrival.address);
+
/* notify caller of sr packets in the callback */
if (do_sync && sess->callbacks.sync_rtcp) {
/* make writable, we might want to change the buffer */
- buffer = gst_buffer_make_metadata_writable (buffer);
+ buffer = gst_buffer_make_writable (buffer);
result = sess->callbacks.sync_rtcp (sess, sess->source, buffer,
sess->sync_rtcp_user_data);
g_return_val_if_fail (is_list || GST_IS_BUFFER (data), GST_FLOW_ERROR);
if (is_list) {
- valid_packet = gst_rtp_buffer_list_validate (GST_BUFFER_LIST_CAST (data));
+ GstBufferList *blist = GST_BUFFER_LIST_CAST (data);
+ gint i, len = gst_buffer_list_length (blist);
+
+ valid_packet = TRUE;
+ for (i = 0; i < len; i++)
+ valid_packet &= gst_rtp_buffer_validate (gst_buffer_list_get (blist, i));
} else {
valid_packet = gst_rtp_buffer_validate (GST_BUFFER_CAST (data));
}
typedef struct
{
+ GstRTCPBuffer rtcpbuf;
RTPSession *sess;
GstBuffer *rtcp;
GstClockTime current_time;
{
GstRTCPPacket *packet = &data->packet;
RTPSource *own = sess->source;
+ GstRTCPBuffer *rtcp = &data->rtcpbuf;
data->rtcp = gst_rtcp_buffer_new (sess->mtu);
+ gst_rtcp_buffer_map (data->rtcp, GST_MAP_READWRITE, rtcp);
+
if (RTP_SOURCE_IS_SENDER (own)) {
guint64 ntptime;
guint32 rtptime;
/* we are a sender, create SR */
GST_DEBUG ("create SR for SSRC %08x", own->ssrc);
- gst_rtcp_buffer_add_packet (data->rtcp, GST_RTCP_TYPE_SR, packet);
+ gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_SR, packet);
/* get latest stats */
rtp_source_get_new_sr (own, data->ntpnstime, data->running_time,
} else {
/* we are only receiver, create RR */
GST_DEBUG ("create RR for SSRC %08x", own->ssrc);
- gst_rtcp_buffer_add_packet (data->rtcp, GST_RTCP_TYPE_RR, packet);
+ gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_RR, packet);
gst_rtcp_packet_rr_set_ssrc (packet, own->ssrc);
}
}
GstRTCPPacket *packet = &data->packet;
const GstStructure *sdes;
gint i, n_fields;
+ GstRTCPBuffer *rtcp = &data->rtcpbuf;
/* add SDES packet */
- gst_rtcp_buffer_add_packet (data->rtcp, GST_RTCP_TYPE_SDES, packet);
+ gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_SDES, packet);
gst_rtcp_packet_sdes_add_item (packet, sess->source->ssrc);
session_bye (RTPSession * sess, ReportData * data)
{
GstRTCPPacket *packet = &data->packet;
+ GstRTCPBuffer *rtcp = &data->rtcpbuf;
/* open packet */
session_start_rtcp (sess, data);
session_sdes (sess, data);
/* add a BYE packet */
- gst_rtcp_buffer_add_packet (data->rtcp, GST_RTCP_TYPE_BYE, packet);
+ gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_BYE, packet);
gst_rtcp_packet_bye_add_ssrc (packet, sess->source->ssrc);
if (sess->bye_reason)
gst_rtcp_packet_bye_set_reason (packet, sess->bye_reason);
guint64 ntpnstime, GstClockTime running_time)
{
GstFlowReturn result = GST_FLOW_OK;
- ReportData data;
+ ReportData data = { GST_RTCP_BUFFER_INIT };
RTPSource *own;
GHashTable *table_copy;
gboolean notify = FALSE;
if (data.rtcp) {
gboolean do_not_suppress;
+ gst_rtcp_buffer_unmap (&data.rtcpbuf);
+
/* Give the user a change to add its own packet */
g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_SENDING_RTCP], 0,
data.rtcp, data.is_early, &do_not_suppress);
if (sess->callbacks.send_rtcp && (do_not_suppress || !data.may_suppress)) {
guint packet_size;
- /* close the RTCP packet */
- gst_rtcp_buffer_end (data.rtcp);
-
- packet_size = GST_BUFFER_SIZE (data.rtcp) + sess->header_len;
+ packet_size = gst_buffer_get_size (data.rtcp) + sess->header_len;
UPDATE_AVG (sess->stats.avg_rtcp_packet_size, packet_size);
GST_DEBUG ("%p, sending RTCP packet, avg size %u, %u", &sess->stats,
has_pli_compare_func (gconstpointer a, gconstpointer ignored)
{
GstRTCPPacket packet;
+ GstRTCPBuffer rtcp = { NULL, };
+ gboolean ret = FALSE;
- packet.buffer = (GstBuffer *) a;
- packet.offset = 0;
+ gst_rtcp_buffer_map ((GstBuffer *) a, GST_MAP_READ, &rtcp);
- if (gst_rtcp_packet_get_type (&packet) == GST_RTCP_TYPE_PSFB &&
- gst_rtcp_packet_fb_get_type (&packet) == GST_RTCP_PSFB_TYPE_PLI)
- return TRUE;
- else
- return FALSE;
+ if (gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) {
+ if (gst_rtcp_packet_get_type (&packet) == GST_RTCP_TYPE_PSFB &&
+ gst_rtcp_packet_fb_get_type (&packet) == GST_RTCP_PSFB_TYPE_PLI)
+ ret = TRUE;
+ }
+
+ gst_rtcp_buffer_unmap (&rtcp);
+
+ return ret;
}
static gboolean
gpointer key, value;
gboolean started_fir = FALSE;
GstRTCPPacket fir_rtcppacket;
+ GstRTCPBuffer rtcp = { NULL, };
RTP_SESSION_LOCK (sess);
+ gst_rtcp_buffer_map (buffer, GST_MAP_READWRITE, &rtcp);
+
g_hash_table_iter_init (&iter, sess->ssrcs[sess->mask_idx]);
while (g_hash_table_iter_next (&iter, &key, &value)) {
guint media_ssrc = GPOINTER_TO_UINT (key);
if (media_src->send_fir) {
if (!started_fir) {
- if (!gst_rtcp_buffer_add_packet (buffer, GST_RTCP_TYPE_PSFB,
+ if (!gst_rtcp_buffer_add_packet (&rtcp, GST_RTCP_TYPE_PSFB,
&fir_rtcppacket))
break;
gst_rtcp_packet_fb_set_type (&fir_rtcppacket, GST_RTCP_PSFB_TYPE_FIR);
if (media_src->send_pli && !rtp_source_has_retained (media_src,
has_pli_compare_func, NULL)) {
- if (gst_rtcp_buffer_add_packet (buffer, GST_RTCP_TYPE_PSFB,
- &pli_rtcppacket)) {
- gst_rtcp_packet_fb_set_type (&pli_rtcppacket, GST_RTCP_PSFB_TYPE_PLI);
- gst_rtcp_packet_fb_set_sender_ssrc (&pli_rtcppacket,
- rtp_source_get_ssrc (sess->source));
- gst_rtcp_packet_fb_set_media_ssrc (&pli_rtcppacket, media_ssrc);
- ret = TRUE;
- } else {
+ if (!gst_rtcp_buffer_add_packet (&rtcp, GST_RTCP_TYPE_PSFB,
+ &pli_rtcppacket))
/* Break because the packet is full, will put next request in a
- * further packet
- */
+ * further packet */
break;
- }
+ gst_rtcp_packet_fb_set_type (&pli_rtcppacket, GST_RTCP_PSFB_TYPE_PLI);
+ gst_rtcp_packet_fb_set_sender_ssrc (&pli_rtcppacket,
+ rtp_source_get_ssrc (sess->source));
+ gst_rtcp_packet_fb_set_media_ssrc (&pli_rtcppacket, media_ssrc);
+ ret = TRUE;
}
media_src->send_pli = FALSE;
}
+ gst_rtcp_buffer_unmap (&rtcp);
RTP_SESSION_UNLOCK (sess);
#define __RTP_SESSION_H__
#include <gst/gst.h>
-#include <gst/netbuffer/gstnetbuffer.h>
#include "rtpsource.h"
#define RTP_IS_SESSION_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),RTP_TYPE_SESSION))
#define RTP_SESSION_CAST(sess) ((RTPSession *)(sess))
-#define RTP_SESSION_LOCK(sess) (g_mutex_lock ((sess)->lock))
-#define RTP_SESSION_UNLOCK(sess) (g_mutex_unlock ((sess)->lock))
+#define RTP_SESSION_LOCK(sess) (g_mutex_lock (&(sess)->lock))
+#define RTP_SESSION_UNLOCK(sess) (g_mutex_unlock (&(sess)->lock))
/**
* RTPSessionProcessRTP:
struct _RTPSession {
GObject object;
- GMutex *lock;
+ GMutex lock;
guint header_len;
guint mtu;
src->probation = RTP_DEFAULT_PROBATION;
src->closing = FALSE;
- src->sdes = gst_structure_new ("application/x-rtp-source-sdes", NULL);
+ src->sdes = gst_structure_new_empty ("application/x-rtp-source-sdes");
src->payload = -1;
src->clock_rate = -1;
}
static void
+rtp_conflicting_address_free (RTPConflictingAddress * addr)
+{
+ g_object_unref (addr->address);
+ g_free (addr);
+}
+
+static void
rtp_source_finalize (GObject * object)
{
RTPSource *src;
gst_caps_replace (&src->caps, NULL);
- g_list_foreach (src->conflicting_addresses, (GFunc) g_free, NULL);
+ g_list_foreach (src->conflicting_addresses,
+ (GFunc) rtp_conflicting_address_free, NULL);
g_list_free (src->conflicting_addresses);
while ((buffer = g_queue_pop_head (src->retained_feedback)))
gst_buffer_unref (buffer);
g_queue_free (src->retained_feedback);
+ if (src->rtp_from)
+ g_object_unref (src->rtp_from);
+ if (src->rtcp_from)
+ g_object_unref (src->rtcp_from);
+
G_OBJECT_CLASS (rtp_source_parent_class)->finalize (object);
}
GstStructure *s;
gboolean is_sender = src->is_sender;
gboolean internal = src->internal;
- gchar address_str[GST_NETADDRESS_MAX_LEN];
+ gchar *address_str;
gboolean have_rb;
guint8 fractionlost = 0;
gint32 packetslost = 0;
"clock-rate", G_TYPE_INT, src->clock_rate, NULL);
/* add address and port */
- if (src->have_rtp_from) {
- gst_netaddress_to_string (&src->rtp_from, address_str,
- sizeof (address_str));
+ if (src->rtp_from) {
+ address_str = __g_socket_address_to_string (src->rtp_from);
gst_structure_set (s, "rtp-from", G_TYPE_STRING, address_str, NULL);
+ g_free (address_str);
}
- if (src->have_rtcp_from) {
- gst_netaddress_to_string (&src->rtcp_from, address_str,
- sizeof (address_str));
+ if (src->rtcp_from) {
+ address_str = __g_socket_address_to_string (src->rtcp_from);
gst_structure_set (s, "rtcp-from", G_TYPE_STRING, address_str, NULL);
+ g_free (address_str);
}
gst_structure_set (s,
* collistion checking.
*/
void
-rtp_source_set_rtp_from (RTPSource * src, GstNetAddress * address)
+rtp_source_set_rtp_from (RTPSource * src, GSocketAddress * address)
{
g_return_if_fail (RTP_IS_SOURCE (src));
- src->have_rtp_from = TRUE;
- memcpy (&src->rtp_from, address, sizeof (GstNetAddress));
+ if (src->rtp_from)
+ g_object_unref (src->rtp_from);
+ src->rtp_from = G_SOCKET_ADDRESS (g_object_ref (address));
}
/**
* collistion checking.
*/
void
-rtp_source_set_rtcp_from (RTPSource * src, GstNetAddress * address)
+rtp_source_set_rtcp_from (RTPSource * src, GSocketAddress * address)
{
g_return_if_fail (RTP_IS_SOURCE (src));
- src->have_rtcp_from = TRUE;
- memcpy (&src->rtcp_from, address, sizeof (GstNetAddress));
+ if (src->rtcp_from)
+ g_object_unref (src->rtcp_from);
+ src->rtcp_from = G_SOCKET_ADDRESS (g_object_ref (address));
}
static GstFlowReturn
gint32 diff;
gint clock_rate;
guint8 pt;
+ GstRTPBuffer rtp = { NULL };
/* get arrival time */
if ((running_time = arrival->running_time) == GST_CLOCK_TIME_NONE)
goto no_time;
- pt = gst_rtp_buffer_get_payload_type (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
GST_LOG ("SSRC %08x got payload %d", src->ssrc, pt);
/* get clockrate */
- if ((clock_rate = get_clock_rate (src, pt)) == -1)
+ if ((clock_rate = get_clock_rate (src, pt)) == -1) {
+ gst_rtp_buffer_unmap (&rtp);
goto no_clock_rate;
+ }
- rtptime = gst_rtp_buffer_get_timestamp (buffer);
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
/* convert arrival time to RTP timestamp units, truncate to 32 bits, we don't
* care about the absolute value, just the difference. */
GST_LOG ("rtparrival %u, rtptime %u, clock-rate %d, diff %d, jitter: %f",
rtparrival, rtptime, clock_rate, diff, (src->stats.jitter) / 16.0);
+ gst_rtp_buffer_unmap (&rtp);
return;
/* ERRORS */
guint16 seqnr, udelta;
RTPSourceStats *stats;
guint16 expected;
+ GstRTPBuffer rtp = { NULL };
g_return_val_if_fail (RTP_IS_SOURCE (src), GST_FLOW_ERROR);
g_return_val_if_fail (GST_IS_BUFFER (buffer), GST_FLOW_ERROR);
stats = &src->stats;
- seqnr = gst_rtp_buffer_get_seq (buffer);
-
- rtp_source_update_caps (src, GST_BUFFER_CAPS (buffer));
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ seqnr = gst_rtp_buffer_get_seq (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (stats->cycles == -1) {
GST_DEBUG ("received first buffer");
src->received_bye = TRUE;
}
-static GstBufferListItem
-set_ssrc (GstBuffer ** buffer, guint group, guint idx, RTPSource * src)
+static gboolean
+set_ssrc (GstBuffer ** buffer, guint idx, RTPSource * src)
{
+ GstRTPBuffer rtp = { NULL };
+
*buffer = gst_buffer_make_writable (*buffer);
- gst_rtp_buffer_set_ssrc (*buffer, src->ssrc);
- return GST_BUFFER_LIST_SKIP_GROUP;
+ gst_rtp_buffer_map (*buffer, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_ssrc (&rtp, src->ssrc);
+ gst_rtp_buffer_unmap (&rtp);
+ return TRUE;
}
/**
GstBuffer *buffer = NULL;
guint packets;
guint32 ssrc;
+ GstRTPBuffer rtp = { NULL };
g_return_val_if_fail (RTP_IS_SOURCE (src), GST_FLOW_ERROR);
g_return_val_if_fail (is_list || GST_IS_BUFFER (data), GST_FLOW_ERROR);
/* We can grab the caps from the first group, since all
* groups of a buffer list have same caps. */
- buffer = gst_buffer_list_get (list, 0, 0);
+ buffer = gst_buffer_list_get (list, 0);
if (!buffer)
goto no_buffer;
} else {
buffer = GST_BUFFER_CAST (data);
}
- rtp_source_update_caps (src, GST_BUFFER_CAPS (buffer));
/* we are a sender now */
src->is_sender = TRUE;
if (is_list) {
+ gint i;
+
/* Each group makes up a network packet. */
- packets = gst_buffer_list_n_groups (list);
- len = gst_rtp_buffer_list_get_payload_len (list);
+ packets = gst_buffer_list_length (list);
+ for (i = 0, len = 0; i < packets; i++) {
+ gst_rtp_buffer_map (gst_buffer_list_get (list, i), GST_MAP_READ, &rtp);
+ len += gst_rtp_buffer_get_payload_len (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+ }
+ /* subsequent info taken from first list member */
+ gst_rtp_buffer_map (gst_buffer_list_get (list, 0), GST_MAP_READ, &rtp);
} else {
packets = 1;
- len = gst_rtp_buffer_get_payload_len (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
}
/* update stats for the SR */
do_bitrate_estimation (src, running_time, &src->bytes_sent);
- if (is_list) {
- rtptime = gst_rtp_buffer_list_get_timestamp (list);
- } else {
- rtptime = gst_rtp_buffer_get_timestamp (buffer);
- }
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
ext_rtptime = src->last_rtptime;
ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime);
src->last_rtptime = ext_rtptime;
/* push packet */
- if (!src->callbacks.push_rtp)
+ if (!src->callbacks.push_rtp) {
+ gst_rtp_buffer_unmap (&rtp);
goto no_callback;
-
- if (is_list) {
- ssrc = gst_rtp_buffer_list_get_ssrc (list);
- } else {
- ssrc = gst_rtp_buffer_get_ssrc (buffer);
}
+ ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+
if (ssrc != src->ssrc) {
/* the SSRC of the packet is not correct, make a writable buffer and
* update the SSRC. This could involve a complete copy of the packet when
list = gst_buffer_list_make_writable (list);
gst_buffer_list_foreach (list, (GstBufferListFunc) set_ssrc, src);
} else {
- set_ssrc (&buffer, 0, 0, src);
+ set_ssrc (&buffer, 0, src);
}
}
GST_LOG ("pushing RTP %s %" G_GUINT64_FORMAT, is_list ? "list" : "packet",
* Returns: TRUE if it was a known conflict, FALSE otherwise
*/
gboolean
-rtp_source_find_conflicting_address (RTPSource * src, GstNetAddress * address,
+rtp_source_find_conflicting_address (RTPSource * src, GSocketAddress * address,
GstClockTime time)
{
GList *item;
item; item = g_list_next (item)) {
RTPConflictingAddress *known_conflict = item->data;
- if (gst_netaddress_equal (address, &known_conflict->address)) {
+ if (__g_socket_address_equal (address, known_conflict->address)) {
known_conflict->time = time;
return TRUE;
}
*/
void
rtp_source_add_conflicting_address (RTPSource * src,
- GstNetAddress * address, GstClockTime time)
+ GSocketAddress * address, GstClockTime time)
{
RTPConflictingAddress *new_conflict;
new_conflict = g_new0 (RTPConflictingAddress, 1);
- memcpy (&new_conflict->address, address, sizeof (GstNetAddress));
+ new_conflict->address = G_SOCKET_ADDRESS (g_object_ref (address));
new_conflict->time = time;
src->conflicting_addresses = g_list_prepend (src->conflicting_addresses,
GList *next_item = g_list_next (item);
if (known_conflict->time < current_time - collision_timeout) {
- gchar buf[40];
+ gchar *buf;
src->conflicting_addresses =
g_list_delete_link (src->conflicting_addresses, item);
- gst_netaddress_to_string (&known_conflict->address, buf, 40);
+ buf = __g_socket_address_to_string (known_conflict->address);
GST_DEBUG ("collision %p timed out: %s", known_conflict, buf);
+ g_free (buf);
+ g_object_unref (known_conflict->address);
g_free (known_conflict);
}
item = next_item;
{
GstBuffer *buffer;
- buffer = gst_buffer_create_sub (packet->buffer, packet->offset,
- (gst_rtcp_packet_get_length (packet) + 1) * 4);
+ buffer = gst_buffer_copy_region (packet->rtcp->buffer, GST_BUFFER_COPY_MEMORY,
+ packet->offset, (gst_rtcp_packet_get_length (packet) + 1) * 4);
GST_BUFFER_TIMESTAMP (buffer) = running_time;
#include <gst/gst.h>
#include <gst/rtp/gstrtcpbuffer.h>
-#include <gst/netbuffer/gstnetbuffer.h>
+#include <gst/net/gstnetaddressmeta.h>
+#include <gio/gio.h>
#include "rtpstats.h"
/**
* RTPConflictingAddress:
- * @address: #GstNetAddress which conflicted
+ * @address: #GSocketAddress which conflicted
* @last_conflict_time: time when the last conflict was seen
*
* This structure is used to account for addresses that have conflicted to find
* loops.
*/
typedef struct {
- GstNetAddress address;
+ GSocketAddress *address;
GstClockTime time;
} RTPConflictingAddress;
gboolean received_bye;
gchar *bye_reason;
- gboolean have_rtp_from;
- GstNetAddress rtp_from;
- gboolean have_rtcp_from;
- GstNetAddress rtcp_from;
+ GSocketAddress *rtp_from;
+ GSocketAddress *rtcp_from;
gint payload;
GstCaps *caps;
gboolean rtp_source_set_sdes_struct (RTPSource * src, GstStructure *sdes);
/* handling network address */
-void rtp_source_set_rtp_from (RTPSource *src, GstNetAddress *address);
-void rtp_source_set_rtcp_from (RTPSource *src, GstNetAddress *address);
+void rtp_source_set_rtp_from (RTPSource *src, GSocketAddress *address);
+void rtp_source_set_rtcp_from (RTPSource *src, GSocketAddress *address);
/* handling RTP */
GstFlowReturn rtp_source_process_rtp (RTPSource *src, GstBuffer *buffer, RTPArrivalStats *arrival);
void rtp_source_reset (RTPSource * src);
gboolean rtp_source_find_conflicting_address (RTPSource * src,
- GstNetAddress *address,
+ GSocketAddress *address,
GstClockTime time);
void rtp_source_add_conflicting_address (RTPSource * src,
- GstNetAddress *address,
+ GSocketAddress *address,
GstClockTime time);
void rtp_source_timeout (RTPSource * src,
{
stats->min_interval = min_interval;
}
+
+gboolean
+__g_socket_address_equal (GSocketAddress * a, GSocketAddress * b)
+{
+ GInetSocketAddress *ia, *ib;
+ GInetAddress *iaa, *iab;
+
+ ia = G_INET_SOCKET_ADDRESS (a);
+ ib = G_INET_SOCKET_ADDRESS (b);
+
+ if (g_inet_socket_address_get_port (ia) !=
+ g_inet_socket_address_get_port (ib))
+ return FALSE;
+
+ iaa = g_inet_socket_address_get_address (ia);
+ iab = g_inet_socket_address_get_address (ib);
+
+ return g_inet_address_equal (iaa, iab);
+}
+
+gchar *
+__g_socket_address_to_string (GSocketAddress * addr)
+{
+ GInetSocketAddress *ia;
+ gchar *ret, *tmp;
+
+ ia = G_INET_SOCKET_ADDRESS (addr);
+
+ tmp = g_inet_address_to_string (g_inet_socket_address_get_address (ia));
+ ret = g_strdup_printf ("%s:%u", tmp, g_inet_socket_address_get_port (ia));
+ g_free (tmp);
+
+ return ret;
+}
#define __RTP_STATS_H__
#include <gst/gst.h>
-#include <gst/netbuffer/gstnetbuffer.h>
+#include <gst/net/gstnetaddressmeta.h>
+#include <gio/gio.h>
/**
* RTPSenderReport:
/**
* RTPArrivalStats:
+ * @address: address of the sender of the packet
* @current_time: current time according to the system clock
* @running_time: arrival time of a packet as buffer running_time
* @ntpnstime: arrival time of a packet NTP time in nanoseconds
- * @have_address: if the @address field contains a valid address
- * @address: address of the sender of the packet
* @bytes: bytes of the packet including lowlevel overhead
* @payload_len: bytes of the RTP payload
*
* Structure holding information about the arrival stats of a packet.
*/
typedef struct {
+ GSocketAddress *address;
GstClockTime current_time;
GstClockTime running_time;
guint64 ntpnstime;
- gboolean have_address;
- GstNetAddress address;
guint bytes;
guint payload_len;
} RTPArrivalStats;
void rtp_stats_set_min_interval (RTPSessionStats *stats,
gdouble min_interval);
+
+
+gboolean __g_socket_address_equal (GSocketAddress *a, GSocketAddress *b);
+gchar * __g_socket_address_to_string (GSocketAddress * addr);
+
#endif /* __RTP_STATS_H__ */
libgstrtsp_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) \
-lgstinterfaces-@GST_MAJORMINOR@ \
-lgstrtp-@GST_MAJORMINOR@ -lgstrtsp-@GST_MAJORMINOR@ \
- -lgstsdp-@GST_MAJORMINOR@ $(GST_LIBS) $(WIN32_LIBS)
+ -lgstsdp-@GST_MAJORMINOR@ $(GST_LIBS)
libgstrtsp_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstrtsp_la_LIBTOOLFLAGS = --tag=disable-static
/* GStreamer
- * Copyright (C) <2005,2006> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2005,2006> Wim Taymans <wim.taymans@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
};
static GstStaticPadTemplate gst_rtp_dec_recv_rtp_sink_template =
-GST_STATIC_PAD_TEMPLATE ("recv_rtp_sink_%d",
+GST_STATIC_PAD_TEMPLATE ("recv_rtp_sink_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("application/x-rtp")
);
static GstStaticPadTemplate gst_rtp_dec_recv_rtcp_sink_template =
-GST_STATIC_PAD_TEMPLATE ("recv_rtcp_sink_%d",
+GST_STATIC_PAD_TEMPLATE ("recv_rtcp_sink_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("application/x-rtcp")
);
static GstStaticPadTemplate gst_rtp_dec_recv_rtp_src_template =
-GST_STATIC_PAD_TEMPLATE ("recv_rtp_src_%d_%d_%d",
+GST_STATIC_PAD_TEMPLATE ("recv_rtp_src_%u_%u_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("application/x-rtp")
);
static GstStaticPadTemplate gst_rtp_dec_rtcp_src_template =
-GST_STATIC_PAD_TEMPLATE ("rtcp_src_%d",
+GST_STATIC_PAD_TEMPLATE ("rtcp_src_%u",
GST_PAD_SRC,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("application/x-rtcp")
static GstStateChangeReturn gst_rtp_dec_change_state (GstElement * element,
GstStateChange transition);
static GstPad *gst_rtp_dec_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_rtp_dec_release_pad (GstElement * element, GstPad * pad);
-static GstFlowReturn gst_rtp_dec_chain_rtp (GstPad * pad, GstBuffer * buffer);
-static GstFlowReturn gst_rtp_dec_chain_rtcp (GstPad * pad, GstBuffer * buffer);
+static GstFlowReturn gst_rtp_dec_chain_rtp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static GstFlowReturn gst_rtp_dec_chain_rtcp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
/* Manages the receiving end of the packets.
static guint gst_rtp_dec_signals[LAST_SIGNAL] = { 0 };
-GST_BOILERPLATE (GstRTPDec, gst_rtp_dec, GstElement, GST_TYPE_ELEMENT);
+#define gst_rtp_dec_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDec, gst_rtp_dec, GST_TYPE_ELEMENT);
-static void
-gst_rtp_dec_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- /* sink pads */
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_dec_recv_rtp_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_dec_recv_rtcp_sink_template);
- /* src pads */
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_dec_recv_rtp_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_rtp_dec_rtcp_src_template);
-
- gst_element_class_set_details_simple (element_class, "RTP Decoder",
- "Codec/Parser/Network",
- "Accepts raw RTP and RTCP packets and sends them forward",
- "Wim Taymans <wim@fluendo.com>");
-}
/* BOXED:UINT,UINT */
#define g_marshal_value_peek_uint(v) g_value_get_uint (v)
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (rtpdec_debug, "rtpdec", 0, "RTP decoder");
+
gobject_class->finalize = gst_rtp_dec_finalize;
gobject_class->set_property = gst_rtp_dec_set_property;
gobject_class->get_property = gst_rtp_dec_get_property;
NULL, NULL, gst_rtp_dec_marshal_VOID__UINT_UINT, G_TYPE_NONE, 2,
G_TYPE_UINT, G_TYPE_UINT);
-
gstelement_class->provide_clock =
GST_DEBUG_FUNCPTR (gst_rtp_dec_provide_clock);
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_rtp_dec_change_state);
GST_DEBUG_FUNCPTR (gst_rtp_dec_request_new_pad);
gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_rtp_dec_release_pad);
- GST_DEBUG_CATEGORY_INIT (rtpdec_debug, "rtpdec", 0, "RTP decoder");
+ /* sink pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_recv_rtp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_recv_rtcp_sink_template));
+ /* src pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_recv_rtp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_rtcp_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP Decoder",
+ "Codec/Parser/Network",
+ "Accepts raw RTP and RTCP packets and sends them forward",
+ "Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_rtp_dec_init (GstRTPDec * rtpdec, GstRTPDecClass * klass)
+gst_rtp_dec_init (GstRTPDec * rtpdec)
{
rtpdec->provided_clock = gst_system_clock_obtain ();
rtpdec->latency = DEFAULT_LATENCY_MS;
+
+ GST_OBJECT_FLAG_SET (rtpdec, GST_ELEMENT_FLAG_PROVIDE_CLOCK);
}
static void
}
static gboolean
-gst_rtp_dec_query_src (GstPad * pad, GstQuery * query)
+gst_rtp_dec_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
{
gboolean res;
break;
}
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
return res;
}
static GstFlowReturn
-gst_rtp_dec_chain_rtp (GstPad * pad, GstBuffer * buffer)
+gst_rtp_dec_chain_rtp (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstFlowReturn res;
GstRTPDec *rtpdec;
GstRTPDecSession *session;
guint32 ssrc;
guint8 pt;
+ GstRTPBuffer rtp = { NULL, };
- rtpdec = GST_RTP_DEC (GST_PAD_PARENT (pad));
+ rtpdec = GST_RTP_DEC (parent);
GST_DEBUG_OBJECT (rtpdec, "got rtp packet");
if (!gst_rtp_buffer_validate (buffer))
goto bad_packet;
- ssrc = gst_rtp_buffer_get_ssrc (buffer);
- pt = gst_rtp_buffer_get_payload_type (buffer);
+
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
GST_DEBUG_OBJECT (rtpdec, "SSRC %08x, PT %d", ssrc, pt);
caps = (GstCaps *) g_value_get_boxed (&ret);
- name = g_strdup_printf ("recv_rtp_src_%d_%u_%d", session->id, ssrc, pt);
+ name = g_strdup_printf ("recv_rtp_src_%u_%u_%u", session->id, ssrc, pt);
klass = GST_ELEMENT_GET_CLASS (rtpdec);
- templ = gst_element_class_get_pad_template (klass, "recv_rtp_src_%d_%d_%d");
+ templ = gst_element_class_get_pad_template (klass, "recv_rtp_src_%u_%u_%u");
session->recv_rtp_src = gst_pad_new_from_template (templ, name);
g_free (name);
- gst_pad_set_caps (session->recv_rtp_src, caps);
+ gst_pad_push_event (session->recv_rtp_src, gst_event_new_caps (caps));
gst_pad_set_element_private (session->recv_rtp_src, session);
gst_pad_set_query_function (session->recv_rtp_src, gst_rtp_dec_query_src);
session->active = TRUE;
}
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (session->recv_rtp_src));
-
res = gst_pad_push (session->recv_rtp_src, buffer);
return res;
}
static GstFlowReturn
-gst_rtp_dec_chain_rtcp (GstPad * pad, GstBuffer * buffer)
+gst_rtp_dec_chain_rtcp (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstRTPDec *src;
gboolean more;
#endif
- src = GST_RTP_DEC (GST_PAD_PARENT (pad));
+ src = GST_RTP_DEC (parent);
GST_DEBUG_OBJECT (src, "got rtcp packet");
GstRTPDecSession *session;
/* first get the session number */
- if (name == NULL || sscanf (name, "recv_rtp_sink_%d", &sessid) != 1)
+ if (name == NULL || sscanf (name, "recv_rtp_sink_%u", &sessid) != 1)
goto no_name;
GST_DEBUG_OBJECT (rtpdec, "finding session %d", sessid);
GstRTPDecSession *session;
/* first get the session number */
- if (name == NULL || sscanf (name, "recv_rtcp_sink_%d", &sessid) != 1)
+ if (name == NULL || sscanf (name, "recv_rtcp_sink_%u", &sessid) != 1)
goto no_name;
GST_DEBUG_OBJECT (rtpdec, "finding session %d", sessid);
GstRTPDecSession *session;
/* first get the session number */
- if (name == NULL || sscanf (name, "rtcp_src_%d", &sessid) != 1)
+ if (name == NULL || sscanf (name, "rtcp_src_%u", &sessid) != 1)
goto no_name;
/* get or create session */
*/
static GstPad *
gst_rtp_dec_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name)
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
{
GstRTPDec *rtpdec;
GstElementClass *klass;
klass = GST_ELEMENT_GET_CLASS (element);
/* figure out the template */
- if (templ == gst_element_class_get_pad_template (klass, "recv_rtp_sink_%d")) {
+ if (templ == gst_element_class_get_pad_template (klass, "recv_rtp_sink_%u")) {
result = create_recv_rtp (rtpdec, templ, name);
} else if (templ == gst_element_class_get_pad_template (klass,
- "recv_rtcp_sink_%d")) {
+ "recv_rtcp_sink_%u")) {
result = create_recv_rtcp (rtpdec, templ, name);
- } else if (templ == gst_element_class_get_pad_template (klass, "rtcp_src_%d")) {
+ } else if (templ == gst_element_class_get_pad_template (klass, "rtcp_src_%u")) {
result = create_rtcp (rtpdec, templ, name);
} else
goto wrong_template;
GST_DEBUG_CATEGORY_INIT (rtspext_debug, "rtspext", 0, "RTSP extension");
/* get a list of all extensions */
- extensions = gst_registry_feature_filter (gst_registry_get_default (),
+ extensions = gst_registry_feature_filter (gst_registry_get (),
(GstPluginFeatureFilter) gst_rtsp_ext_list_filter, FALSE, NULL);
}
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#ifdef HAVE_UNISTD_H
#include <unistd.h>
#endif /* HAVE_UNISTD_H */
#include "gstrtspsrc.h"
-#ifdef G_OS_WIN32
-#include <winsock2.h>
-#endif
-
GST_DEBUG_CATEGORY_STATIC (rtspsrc_debug);
#define GST_CAT_DEFAULT (rtspsrc_debug)
-static GstStaticPadTemplate rtptemplate = GST_STATIC_PAD_TEMPLATE ("stream%d",
+static GstStaticPadTemplate rtptemplate = GST_STATIC_PAD_TEMPLATE ("stream_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS ("application/x-rtp; application/x-rdt"));
/* templates used internally */
static GstStaticPadTemplate anysrctemplate =
-GST_STATIC_PAD_TEMPLATE ("internalsrc%d",
+GST_STATIC_PAD_TEMPLATE ("internalsrc_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
static GstStaticPadTemplate anysinktemplate =
-GST_STATIC_PAD_TEMPLATE ("internalsink%d",
+GST_STATIC_PAD_TEMPLATE ("internalsink_%u",
GST_PAD_SINK,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
static gboolean gst_rtspsrc_setup_auth (GstRTSPSrc * src,
GstRTSPMessage * response);
-static void gst_rtspsrc_loop_send_cmd (GstRTSPSrc * src, gint cmd,
- gboolean flush);
+static void gst_rtspsrc_loop_send_cmd (GstRTSPSrc * src, gint cmd);
static GstRTSPResult gst_rtspsrc_send_cb (GstRTSPExtension * ext,
GstRTSPMessage * request, GstRTSPMessage * response, GstRTSPSrc * src);
gboolean only_close);
static gboolean gst_rtspsrc_uri_set_uri (GstURIHandler * handler,
- const gchar * uri);
+ const gchar * uri, GError ** error);
static gboolean gst_rtspsrc_activate_streams (GstRTSPSrc * src);
static gboolean gst_rtspsrc_loop (GstRTSPSrc * src);
} G_STMT_END
/*static guint gst_rtspsrc_signals[LAST_SIGNAL] = { 0 }; */
-
-static void
-_do_init (GType rtspsrc_type)
-{
- static const GInterfaceInfo urihandler_info = {
- gst_rtspsrc_uri_handler_init,
- NULL,
- NULL
- };
-
- GST_DEBUG_CATEGORY_INIT (rtspsrc_debug, "rtspsrc", 0, "RTSP src");
-
- g_type_add_interface_static (rtspsrc_type, GST_TYPE_URI_HANDLER,
- &urihandler_info);
-}
-
-GST_BOILERPLATE_FULL (GstRTSPSrc, gst_rtspsrc, GstBin, GST_TYPE_BIN, _do_init);
-
-static void
-gst_rtspsrc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &rtptemplate);
-
- gst_element_class_set_details_simple (element_class, "RTSP packet receiver",
- "Source/Network",
- "Receive data over the network via RTSP (RFC 2326)",
- "Wim Taymans <wim@fluendo.com>, "
- "Thijs Vermeir <thijs.vermeir@barco.com>, "
- "Lutz Mueller <lutz@topfrose.de>");
-}
+#define gst_rtspsrc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstRTSPSrc, gst_rtspsrc, GST_TYPE_BIN,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_rtspsrc_uri_handler_init));
static void
gst_rtspsrc_class_init (GstRTSPSrcClass * klass)
gstelement_class = (GstElementClass *) klass;
gstbin_class = (GstBinClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (rtspsrc_debug, "rtspsrc", 0, "RTSP src");
+
gobject_class->set_property = gst_rtspsrc_set_property;
gobject_class->get_property = gst_rtspsrc_get_property;
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_CONNECTION_SPEED,
- g_param_spec_uint ("connection-speed", "Connection Speed",
+ g_param_spec_uint64 ("connection-speed", "Connection Speed",
"Network connection speed in kbps (0 = unknown)",
- 0, G_MAXINT / 1000, DEFAULT_CONNECTION_SPEED,
+ 0, G_MAXUINT64 / 1000, DEFAULT_CONNECTION_SPEED,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_NAT_METHOD,
gstelement_class->send_event = gst_rtspsrc_send_event;
gstelement_class->change_state = gst_rtspsrc_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtptemplate));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTSP packet receiver", "Source/Network",
+ "Receive data over the network via RTSP (RFC 2326)",
+ "Wim Taymans <wim@fluendo.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>, "
+ "Lutz Mueller <lutz@topfrose.de>");
+
gstbin_class->handle_message = gst_rtspsrc_handle_message;
gst_rtsp_ext_list_init ();
static void
-gst_rtspsrc_init (GstRTSPSrc * src, GstRTSPSrcClass * g_class)
+gst_rtspsrc_init (GstRTSPSrc * src)
{
-#ifdef G_OS_WIN32
- WSADATA wsa_data;
-
- if (WSAStartup (MAKEWORD (2, 2), &wsa_data) != 0) {
- GST_ERROR_OBJECT (src, "WSAStartup failed: 0x%08x", WSAGetLastError ());
- }
-#endif
-
src->conninfo.location = g_strdup (DEFAULT_LOCATION);
src->protocols = DEFAULT_PROTOCOLS;
src->debug = DEFAULT_DEBUG;
/* protects the streaming thread in interleaved mode or the polling
* thread in UDP mode. */
- src->stream_rec_lock = g_new (GStaticRecMutex, 1);
- g_static_rec_mutex_init (src->stream_rec_lock);
+ g_rec_mutex_init (&src->stream_rec_lock);
/* protects our state changes from multiple invocations */
- src->state_rec_lock = g_new (GStaticRecMutex, 1);
- g_static_rec_mutex_init (src->state_rec_lock);
+ g_rec_mutex_init (&src->state_rec_lock);
src->state = GST_RTSP_STATE_INVALID;
- GST_OBJECT_FLAG_SET (src, GST_ELEMENT_IS_SOURCE);
+ GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);
}
static void
}
/* free locks */
- g_static_rec_mutex_free (rtspsrc->stream_rec_lock);
- g_free (rtspsrc->stream_rec_lock);
- g_static_rec_mutex_free (rtspsrc->state_rec_lock);
- g_free (rtspsrc->state_rec_lock);
-
-#ifdef G_OS_WIN32
- WSACleanup ();
-#endif
+ g_rec_mutex_clear (&rtspsrc->stream_rec_lock);
+ g_rec_mutex_clear (&rtspsrc->state_rec_lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
switch (prop_id) {
case PROP_LOCATION:
gst_rtspsrc_uri_set_uri (GST_URI_HANDLER (rtspsrc),
- g_value_get_string (value));
+ g_value_get_string (value), NULL);
break;
case PROP_PROTOCOLS:
rtspsrc->protocols = g_value_get_flags (value);
rtspsrc->latency = g_value_get_uint (value);
break;
case PROP_CONNECTION_SPEED:
- rtspsrc->connection_speed = g_value_get_uint (value);
+ rtspsrc->connection_speed = g_value_get_uint64 (value);
break;
case PROP_NAT_METHOD:
rtspsrc->nat_method = g_value_get_enum (value);
g_value_set_uint (value, rtspsrc->latency);
break;
case PROP_CONNECTION_SPEED:
- g_value_set_uint (value, rtspsrc->connection_speed);
+ g_value_set_uint64 (value, rtspsrc->connection_speed);
break;
case PROP_NAT_METHOD:
g_value_set_enum (value, rtspsrc->nat_method);
/* we keep these elements, we configure all in configure_transport when the
* server told us to really use the UDP ports. */
- stream->udpsrc[0] = gst_object_ref (udpsrc0);
- stream->udpsrc[1] = gst_object_ref (udpsrc1);
+ stream->udpsrc[0] = gst_object_ref_sink (udpsrc0);
+ stream->udpsrc[1] = gst_object_ref_sink (udpsrc1);
/* keep track of next available port number when we have a range
* configured */
if (src->next_port_num != 0)
src->next_port_num = tmp_rtcp + 1;
- /* they are ours now */
- gst_object_sink (udpsrc0);
- gst_object_sink (udpsrc1);
-
return TRUE;
/* ERRORS */
cmd = CMD_WAIT;
state = GST_STATE_PAUSED;
} else {
- event = gst_event_new_flush_stop ();
+ event = gst_event_new_flush_stop (TRUE);
GST_DEBUG_OBJECT (src, "stop flush; playing %d", playing);
cmd = CMD_LOOP;
if (playing)
}
}
gst_rtspsrc_push_event (src, event, FALSE);
- gst_rtspsrc_loop_send_cmd (src, cmd, flush);
+ gst_rtspsrc_loop_send_cmd (src, cmd);
/* set up manager before data-flow resumes */
/* to manage jitterbuffer buffer mode */
* right values in the segment to perform the seek */
if (event) {
GST_DEBUG_OBJECT (src, "configuring seek");
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
}
/* if we started flush, we stop now */
GST_DEBUG_OBJECT (src, "stopping flush");
gst_rtspsrc_flush (src, FALSE, playing);
- } else if (src->running) {
- /* re-engage loop */
- gst_rtspsrc_loop_send_cmd (src, CMD_LOOP, FALSE);
-
- /* we are running the current segment and doing a non-flushing seek,
- * close the segment first based on the previous last_stop. */
- GST_DEBUG_OBJECT (src, "closing running segment %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, src->segment.accum, src->segment.last_stop);
-
- /* queue the segment for sending in the stream thread */
- if (src->close_segment)
- gst_event_unref (src->close_segment);
- src->close_segment = gst_event_new_new_segment (TRUE,
- src->segment.rate, src->segment.format,
- src->segment.accum, src->segment.last_stop, src->segment.accum);
-
- /* keep track of our last_stop */
- seeksegment.accum = src->segment.last_stop;
}
/* now we did the seek and can activate the new segment values */
if (src->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT_CAST (src),
gst_message_new_segment_start (GST_OBJECT_CAST (src),
- src->segment.format, src->segment.last_stop));
+ src->segment.format, src->segment.position));
}
/* now create the newsegment */
GST_DEBUG_OBJECT (src, "Creating newsegment from %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, src->segment.last_stop, stop);
+ " to %" G_GINT64_FORMAT, src->segment.position, stop);
/* store the newsegment event so it can be sent from the streaming thread. */
if (src->start_segment)
gst_event_unref (src->start_segment);
- src->start_segment =
- gst_event_new_new_segment (FALSE, src->segment.rate,
- src->segment.format, src->segment.last_stop, stop,
- src->segment.last_stop);
+ src->start_segment = gst_event_new_segment (&src->segment);
/* mark discont */
GST_DEBUG_OBJECT (src, "mark DISCONT, we did a seek to another position");
}
static gboolean
-gst_rtspsrc_handle_src_event (GstPad * pad, GstEvent * event)
+gst_rtspsrc_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
GstRTSPSrc *src;
gboolean res = TRUE;
gboolean forward;
- src = GST_RTSPSRC_CAST (gst_pad_get_parent (pad));
+ src = GST_RTSPSRC_CAST (parent);
GST_DEBUG_OBJECT (src, "pad %s:%s received event %s",
GST_DEBUG_PAD_NAME (pad), GST_EVENT_TYPE_NAME (event));
} else {
gst_event_unref (event);
}
- gst_object_unref (src);
return res;
}
/* this is the final event function we receive on the internal source pad when
* we deal with TCP connections */
static gboolean
-gst_rtspsrc_handle_internal_src_event (GstPad * pad, GstEvent * event)
+gst_rtspsrc_handle_internal_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
gboolean res;
/* this is the final query function we receive on the internal source pad when
* we deal with TCP connections */
static gboolean
-gst_rtspsrc_handle_internal_src_query (GstPad * pad, GstQuery * query)
+gst_rtspsrc_handle_internal_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
GstRTSPSrc *src;
gboolean res = TRUE;
/* this query is executed on the ghost source pad exposed on rtspsrc. */
static gboolean
-gst_rtspsrc_handle_src_query (GstPad * pad, GstQuery * query)
+gst_rtspsrc_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
GstRTSPSrc *src;
gboolean res = FALSE;
- src = GST_RTSPSRC_CAST (gst_pad_get_parent (pad));
+ src = GST_RTSPSRC_CAST (parent);
GST_DEBUG_OBJECT (src, "pad %s:%s received query %s",
GST_DEBUG_PAD_NAME (pad), GST_QUERY_TYPE_NAME (query));
break;
}
}
- gst_object_unref (src);
return res;
}
/* callback for RTCP messages to be sent to the server when operating in TCP
* mode. */
static GstFlowReturn
-gst_rtspsrc_sink_chain (GstPad * pad, GstBuffer * buffer)
+gst_rtspsrc_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstRTSPSrc *src;
GstRTSPStream *stream;
GstFlowReturn res = GST_FLOW_OK;
+ GstMapInfo map;
guint8 *data;
guint size;
GstRTSPResult ret;
stream = (GstRTSPStream *) gst_pad_get_element_private (pad);
src = stream->parent;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ size = map.size;
+ data = map.data;
gst_rtsp_message_init_data (&message, stream->channel[1]);
gst_rtsp_message_steal_body (&message, &data, &size);
gst_rtsp_message_unset (&message);
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return res;
}
-static void
-pad_unblocked (GstPad * pad, gboolean blocked, GstRTSPSrc * src)
+static GstPadProbeReturn
+pad_blocked (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
- GST_DEBUG_OBJECT (src, "pad %s:%s unblocked", GST_DEBUG_PAD_NAME (pad));
-}
+ GstRTSPSrc *src = user_data;
-static void
-pad_blocked (GstPad * pad, gboolean blocked, GstRTSPSrc * src)
-{
GST_DEBUG_OBJECT (src, "pad %s:%s blocked, activating streams",
GST_DEBUG_PAD_NAME (pad));
gst_rtspsrc_activate_streams (src);
- return;
+ return GST_PAD_PROBE_OK;
was_ok:
{
GST_OBJECT_UNLOCK (src);
- return;
+ return GST_PAD_PROBE_OK;
}
}
GST_RTSP_STATE_LOCK (src);
/* find stream */
name = gst_object_get_name (GST_OBJECT_CAST (pad));
- if (sscanf (name, "recv_rtp_src_%d_%d_%d", &id, &ssrc, &pt) != 3)
+ if (sscanf (name, "recv_rtp_src_%u_%u_%u", &id, &ssrc, &pt) != 3)
goto unknown_stream;
GST_DEBUG_OBJECT (src, "stream: %u, SSRC %d, PT %d", id, ssrc, pt);
/* we stream directly to the manager, get some pads. Each RTSP stream goes
* into a separate RTP session. */
- name = g_strdup_printf ("recv_rtp_sink_%d", stream->id);
+ name = g_strdup_printf ("recv_rtp_sink_%u", stream->id);
stream->channelpad[0] = gst_element_get_request_pad (src->manager, name);
g_free (name);
- name = g_strdup_printf ("recv_rtcp_sink_%d", stream->id);
+ name = g_strdup_printf ("recv_rtcp_sink_%u", stream->id);
stream->channelpad[1] = gst_element_get_request_pad (src->manager, name);
g_free (name);
GST_DEBUG_OBJECT (src, "no manager, creating pad");
/* create a new pad we will use to stream to */
- name = g_strdup_printf ("stream%d", stream->id);
+ name = g_strdup_printf ("stream_%u", stream->id);
template = gst_static_pad_template_get (&rtptemplate);
stream->channelpad[0] = gst_pad_new_from_template (template, name);
gst_object_unref (template);
template = gst_static_pad_template_get (&anysrctemplate);
/* allocate pads for sending the channel data into the manager */
- pad0 = gst_pad_new_from_template (template, "internalsrc0");
+ pad0 = gst_pad_new_from_template (template, "internalsrc_0");
gst_pad_link (pad0, stream->channelpad[0]);
gst_object_unref (stream->channelpad[0]);
stream->channelpad[0] = pad0;
if (stream->channelpad[1]) {
/* if we have a sinkpad for the other channel, create a pad and link to the
* manager. */
- pad1 = gst_pad_new_from_template (template, "internalsrc1");
+ pad1 = gst_pad_new_from_template (template, "internalsrc_1");
gst_pad_set_event_function (pad1, gst_rtspsrc_handle_internal_src_event);
gst_pad_link (pad1, stream->channelpad[1]);
gst_object_unref (stream->channelpad[1]);
template = gst_static_pad_template_get (&anysinktemplate);
- stream->rtcppad = gst_pad_new_from_template (template, "internalsink0");
+ stream->rtcppad = gst_pad_new_from_template (template, "internalsink_0");
gst_pad_set_chain_function (stream->rtcppad, gst_rtspsrc_sink_chain);
gst_pad_set_element_private (stream->rtcppad, stream);
gst_pad_set_active (stream->rtcppad, TRUE);
/* get session RTCP pad */
- name = g_strdup_printf ("send_rtcp_src_%d", stream->id);
+ name = g_strdup_printf ("send_rtcp_src_%u", stream->id);
pad = gst_element_get_request_pad (src->manager, name);
g_free (name);
goto no_element;
/* take ownership */
- gst_object_ref (stream->udpsrc[0]);
- gst_object_sink (stream->udpsrc[0]);
+ gst_object_ref_sink (stream->udpsrc[0]);
/* change state */
gst_element_set_state (stream->udpsrc[0], GST_STATE_PAUSED);
goto no_element;
/* take ownership */
- gst_object_ref (stream->udpsrc[1]);
- gst_object_sink (stream->udpsrc[1]);
+ gst_object_ref_sink (stream->udpsrc[1]);
gst_element_set_state (stream->udpsrc[1], GST_STATE_PAUSED);
}
/* configure pad block on the pad. As soon as there is dataflow on the
* UDP source, we know that UDP is not blocked by a firewall and we can
* configure all the streams to let the application autoplug decoders. */
- gst_pad_set_blocked_async (stream->blockedpad, TRUE,
- (GstPadBlockCallback) pad_blocked, src);
+ stream->blockid =
+ gst_pad_add_probe (stream->blockedpad,
+ GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, pad_blocked, src, NULL);
if (stream->channelpad[0]) {
GST_DEBUG_OBJECT (src, "connecting UDP source 0 to manager");
GstRTSPStream * stream, GstRTSPTransport * transport)
{
GstPad *pad;
- gint rtp_port, rtcp_port, sockfd = -1;
+ gint rtp_port, rtcp_port;
gboolean do_rtp, do_rtcp;
const gchar *destination;
gchar *uri, *name;
guint ttl = 0;
+ GSocket *socket;
/* get transport info */
gst_rtspsrc_get_transport_info (src, stream, transport, &destination,
if (stream->udpsrc[0]) {
/* configure socket, we give it the same UDP socket as the udpsrc for RTP
* so that NAT firewalls will open a hole for us */
- g_object_get (G_OBJECT (stream->udpsrc[0]), "sock", &sockfd, NULL);
- GST_DEBUG_OBJECT (src, "RTP UDP src has sock %d", sockfd);
+ g_object_get (G_OBJECT (stream->udpsrc[0]), "used-socket", &socket, NULL);
+ GST_DEBUG_OBJECT (src, "RTP UDP src has sock %p", socket);
/* configure socket and make sure udpsink does not close it when shutting
* down, it belongs to udpsrc after all. */
- g_object_set (G_OBJECT (stream->udpsink[0]), "sockfd", sockfd,
- "closefd", FALSE, NULL);
+ g_object_set (G_OBJECT (stream->udpsink[0]), "socket", socket,
+ "close-socket", FALSE, NULL);
+ g_object_unref (socket);
}
/* the source for the dummy packets to open up NAT */
"sizetype", 2, "sizemax", 200, "silent", TRUE, NULL);
/* we don't want to consider this a sink */
- GST_OBJECT_FLAG_UNSET (stream->udpsink[0], GST_ELEMENT_IS_SINK);
+ GST_OBJECT_FLAG_UNSET (stream->udpsink[0], GST_ELEMENT_FLAG_SINK);
/* keep everything locked */
gst_element_set_locked_state (stream->udpsink[0], TRUE);
/* configure socket, we give it the same UDP socket as the udpsrc for RTCP
* because some servers check the port number of where it sends RTCP to identify
* the RTCP packets it receives */
- g_object_get (G_OBJECT (stream->udpsrc[1]), "sock", &sockfd, NULL);
- GST_DEBUG_OBJECT (src, "RTCP UDP src has sock %d", sockfd);
+ g_object_get (G_OBJECT (stream->udpsrc[1]), "used-socket", &socket, NULL);
+ GST_DEBUG_OBJECT (src, "RTCP UDP src has sock %p", socket);
/* configure socket and make sure udpsink does not close it when shutting
* down, it belongs to udpsrc after all. */
- g_object_set (G_OBJECT (stream->udpsink[1]), "sockfd", sockfd,
- "closefd", FALSE, NULL);
+ g_object_set (G_OBJECT (stream->udpsink[1]), "socket", socket,
+ "close-socket", FALSE, NULL);
+ g_object_unref (socket);
}
/* we don't want to consider this a sink */
- GST_OBJECT_FLAG_UNSET (stream->udpsink[1], GST_ELEMENT_IS_SINK);
+ GST_OBJECT_FLAG_UNSET (stream->udpsink[1], GST_ELEMENT_FLAG_SINK);
/* we keep this playing always */
gst_element_set_locked_state (stream->udpsink[1], TRUE);
stream->rtcppad = gst_element_get_static_pad (stream->udpsink[1], "sink");
/* get session RTCP pad */
- name = g_strdup_printf ("send_rtcp_src_%d", stream->id);
+ name = g_strdup_printf ("send_rtcp_src_%u", stream->id);
pad = gst_element_get_request_pad (src->manager, name);
g_free (name);
/* create ghostpad, don't add just yet, this will be done when we activate
* the stream. */
- name = g_strdup_printf ("stream%d", stream->id);
+ name = g_strdup_printf ("stream_%u", stream->id);
template = gst_static_pad_template_get (&rtptemplate);
stream->srcpad = gst_ghost_pad_new_from_template (name, outpad, template);
gst_pad_set_event_function (stream->srcpad, gst_rtspsrc_handle_src_event);
for (walk = src->streams; walk; walk = g_list_next (walk)) {
GstRTSPStream *stream = (GstRTSPStream *) walk->data;
- if (stream->blockedpad) {
+ if (stream->blockid) {
GST_DEBUG_OBJECT (src, "unblocking stream pad %p", stream);
- gst_pad_set_blocked_async (stream->blockedpad, FALSE,
- (GstPadBlockCallback) pad_unblocked, src);
- stream->blockedpad = NULL;
+ gst_pad_remove_probe (stream->blockedpad, stream->blockid);
+ stream->blockid = 0;
}
}
GST_DEBUG_OBJECT (src, "configuring stream caps");
- start = segment->last_stop;
+ start = segment->position;
stop = segment->duration;
play_speed = segment->rate;
play_scale = segment->applied_rate;
size -= 1;
buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = data;
- GST_BUFFER_MALLOCDATA (buf) = data;
- GST_BUFFER_SIZE (buf) = size;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
/* don't need message anymore */
gst_rtsp_message_unset (&message);
src->need_activate = FALSE;
}
- if (!src->manager) {
- /* set stream caps on buffer when we don't have a session manager to do it
- * for us */
- gst_buffer_set_caps (buf, stream->caps);
- }
-
if (src->base_time == -1) {
/* Take current running_time. This timestamp will be put on
* the first buffer of each stream because we are a live source and so we
("The server closed the connection."));
src->conninfo.connected = FALSE;
gst_rtsp_message_unset (&message);
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
interrupt:
{
gst_rtsp_message_unset (&message);
GST_DEBUG_OBJECT (src, "got interrupted: stop connection flush");
gst_rtspsrc_connection_flush (src, FALSE);
- return GST_FLOW_WRONG_STATE;
+ return GST_FLOW_FLUSHING;
}
receive_error:
{
gst_rtsp_message_unset (&message);
GST_DEBUG_OBJECT (src, "got interrupted: stop connection flush");
gst_rtspsrc_connection_flush (src, FALSE);
- return GST_FLOW_WRONG_STATE;
+ return GST_FLOW_FLUSHING;
}
connect_error:
{
g_free (str);
ret = GST_FLOW_ERROR;
} else {
- ret = GST_FLOW_WRONG_STATE;
+ ret = GST_FLOW_FLUSHING;
}
return ret;
}
g_free (str);
ret = GST_FLOW_ERROR;
} else {
- ret = GST_FLOW_WRONG_STATE;
+ ret = GST_FLOW_FLUSHING;
}
return ret;
}
("The server closed the connection."));
src->conninfo.connected = FALSE;
gst_rtsp_message_unset (&message);
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
}
}
static void
-gst_rtspsrc_loop_send_cmd (GstRTSPSrc * src, gint cmd, gboolean flush)
+gst_rtspsrc_loop_send_cmd (GstRTSPSrc * src, gint cmd)
{
gint old;
- /* FIXME flush param mute; remove at discretion */
-
/* start new request */
gst_rtspsrc_loop_start_cmd (src, cmd);
no_connection:
{
GST_WARNING_OBJECT (src, "we are not connected");
- ret = GST_FLOW_WRONG_STATE;
+ ret = GST_FLOW_FLUSHING;
goto pause;
}
pause:
GST_DEBUG_OBJECT (src, "pausing task, reason %s", reason);
src->running = FALSE;
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
/* perform EOS logic */
if (src->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT_CAST (src),
gst_message_new_segment_done (GST_OBJECT_CAST (src),
- src->segment.format, src->segment.last_stop));
+ src->segment.format, src->segment.position));
} else {
gst_rtspsrc_push_event (src, gst_event_new_eos (), FALSE);
}
- } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
/* for fatal errors we post an error message, post the error before the
* EOS so the app knows about the error first. */
GST_ELEMENT_ERROR (src, STREAM, FAILED,
else
transports = GST_RTSP_LOWER_TRANS_UNKNOWN;
- gst_rtspsrc_uri_set_uri (GST_URI_HANDLER (src), new_location);
+ gst_rtspsrc_uri_set_uri (GST_URI_HANDLER (src), new_location, NULL);
/* set old transports */
if (src->conninfo.url && transports != GST_RTSP_LOWER_TRANS_UNKNOWN)
/* we need to start playback without clipping from the position reported by
* the server */
segment->start = seconds;
- segment->last_stop = seconds;
+ segment->position = seconds;
if (therange->max.type == GST_RTSP_TIME_NOW)
seconds = -1;
/* don't change duration with unknown value, we might have a valid value
* there that we want to keep. */
if (seconds != -1)
- gst_segment_set_duration (segment, GST_FORMAT_TIME, seconds);
+ segment->duration = seconds;
return TRUE;
}
if (src->props)
gst_structure_remove_all_fields (src->props);
else
- src->props = gst_structure_empty_new ("RTSPProperties");
+ src->props = gst_structure_new_empty ("RTSPProperties");
if (src->debug)
gst_sdp_message_dump (sdp);
if (src->range && src->range->min.type == GST_RTSP_TIME_NOW) {
g_strlcpy (val_str, "now", sizeof (val_str));
} else {
- if (segment->last_stop == 0) {
+ if (segment->position == 0) {
g_strlcpy (val_str, "0", sizeof (val_str));
} else {
g_ascii_dtostr (val_str, sizeof (val_str),
- ((gdouble) segment->last_stop) / GST_SECOND);
+ ((gdouble) segment->position) / GST_SECOND);
}
}
return g_strdup_printf ("npt=%s-", val_str);
/* NOTE the above also disables npt based eos detection */
/* and below forces position to 0,
* which is visible feedback we lost the plot */
- segment->start = segment->last_stop = src->last_pos;
+ segment->start = segment->position = src->last_pos;
}
gst_rtsp_message_unset (&request);
/* we only act on the first udp timeout message, others are irrelevant
* and can be ignored. */
if (!ignore_timeout)
- gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_RECONNECT, TRUE);
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_RECONNECT);
/* eat and free */
gst_message_unref (message);
return;
src->loop_cmd = CMD_WAIT;
if (src->task == NULL) {
- src->task = gst_task_create ((GstTaskFunction) gst_rtspsrc_thread, src);
+ src->task = gst_task_new ((GstTaskFunction) gst_rtspsrc_thread, src);
if (src->task == NULL)
goto task_error;
GST_DEBUG_OBJECT (src, "stopping");
/* also cancels pending task */
- gst_rtspsrc_loop_send_cmd (src, CMD_WAIT, TRUE);
+ gst_rtspsrc_loop_send_cmd (src, CMD_WAIT);
GST_OBJECT_LOCK (src);
if ((task = src->task)) {
/* first attempt, don't ignore timeouts */
rtspsrc->ignore_timeout = FALSE;
rtspsrc->open_error = FALSE;
- gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_OPEN, FALSE);
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_OPEN);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
/* unblock the tcp tasks and make the loop waiting */
- gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_WAIT, TRUE);
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_WAIT);
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
break;
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
- gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_PLAY, FALSE);
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_PLAY);
break;
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
/* send pause request and keep the idle task around */
- gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_PAUSE, FALSE);
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_PAUSE);
ret = GST_STATE_CHANGE_NO_PREROLL;
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
ret = GST_STATE_CHANGE_NO_PREROLL;
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_CLOSE, FALSE);
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_CLOSE);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
gst_rtspsrc_stop (rtspsrc);
/*** GSTURIHANDLER INTERFACE *************************************************/
static GstURIType
-gst_rtspsrc_uri_get_type (void)
+gst_rtspsrc_uri_get_type (GType type)
{
return GST_URI_SRC;
}
-static gchar **
-gst_rtspsrc_uri_get_protocols (void)
+static const gchar *const *
+gst_rtspsrc_uri_get_protocols (GType type)
{
static const gchar *protocols[] =
{ "rtsp", "rtspu", "rtspt", "rtsph", "rtsp-sdp", NULL };
- return (gchar **) protocols;
+ return protocols;
}
-static const gchar *
+static gchar *
gst_rtspsrc_uri_get_uri (GstURIHandler * handler)
{
GstRTSPSrc *src = GST_RTSPSRC (handler);
- /* should not dup */
- return src->conninfo.location;
+ /* FIXME: make thread-safe */
+ return g_strdup (src->conninfo.location);
}
static gboolean
-gst_rtspsrc_uri_set_uri (GstURIHandler * handler, const gchar * uri)
+gst_rtspsrc_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
{
GstRTSPSrc *src;
GstRTSPResult res;
sdp_failed:
{
GST_ERROR_OBJECT (src, "Could not create new SDP (%d)", res);
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Could not create SDP");
return FALSE;
}
invalid_sdp:
GST_ERROR_OBJECT (src, "Not a valid SDP (%d) '%s'", res,
GST_STR_NULL (uri));
gst_sdp_message_free (sdp);
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Invalid SDP");
return FALSE;
}
parse_error:
{
GST_ERROR_OBJECT (src, "Not a valid RTSP url '%s' (%d)",
GST_STR_NULL (uri), res);
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Invalid RTSP URI");
return FALSE;
}
}
typedef struct _GstRTSPSrc GstRTSPSrc;
typedef struct _GstRTSPSrcClass GstRTSPSrcClass;
-#define GST_RTSP_STATE_GET_LOCK(rtsp) (GST_RTSPSRC_CAST(rtsp)->state_rec_lock)
-#define GST_RTSP_STATE_LOCK(rtsp) (g_static_rec_mutex_lock (GST_RTSP_STATE_GET_LOCK(rtsp)))
-#define GST_RTSP_STATE_UNLOCK(rtsp) (g_static_rec_mutex_unlock (GST_RTSP_STATE_GET_LOCK(rtsp)))
+#define GST_RTSP_STATE_GET_LOCK(rtsp) (&GST_RTSPSRC_CAST(rtsp)->state_rec_lock)
+#define GST_RTSP_STATE_LOCK(rtsp) (g_rec_mutex_lock (GST_RTSP_STATE_GET_LOCK(rtsp)))
+#define GST_RTSP_STATE_UNLOCK(rtsp) (g_rec_mutex_unlock (GST_RTSP_STATE_GET_LOCK(rtsp)))
-#define GST_RTSP_STREAM_GET_LOCK(rtsp) (GST_RTSPSRC_CAST(rtsp)->stream_rec_lock)
-#define GST_RTSP_STREAM_LOCK(rtsp) (g_static_rec_mutex_lock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
-#define GST_RTSP_STREAM_UNLOCK(rtsp) (g_static_rec_mutex_unlock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
+#define GST_RTSP_STREAM_GET_LOCK(rtsp) (&GST_RTSPSRC_CAST(rtsp)->stream_rec_lock)
+#define GST_RTSP_STREAM_LOCK(rtsp) (g_rec_mutex_lock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
+#define GST_RTSP_STREAM_UNLOCK(rtsp) (g_rec_mutex_unlock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
typedef struct _GstRTSPConnInfo GstRTSPConnInfo;
/* our udp sources */
GstElement *udpsrc[2];
GstPad *blockedpad;
+ gulong blockid;
gboolean is_ipv6;
/* our udp sinks back to the server */
/* task and mutex for interleaved mode */
gboolean interleaved;
GstTask *task;
- GStaticRecMutex *stream_rec_lock;
+ GRecMutex stream_rec_lock;
GstSegment segment;
gboolean running;
gboolean need_range;
gboolean skip;
gint free_channel;
- GstEvent *close_segment;
GstEvent *start_segment;
GstClockTime base_time;
gboolean open_error;
/* mutex for protecting state changes */
- GStaticRecMutex *state_rec_lock;
+ GRecMutex state_rec_lock;
GstSDPMessage *sdp;
gboolean from_sdp;
GTimeVal tcp_timeout;
GTimeVal *ptcp_timeout;
guint latency;
- guint connection_speed;
+ guint64 connection_speed;
GstRTSPNatMethod nat_method;
gboolean do_rtcp;
gchar *proxy_host;
libgstshapewipe_la_SOURCES = gstshapewipe.c
-libgstshapewipe_la_CFLAGS = $(GIO_CFLAGS) $(GST_CFLAGS) $(GST_CONTROLLER_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
-libgstshapewipe_la_LIBADD = $(GIO_LIBS) $(GST_LIBS) $(GST_CONTROLLER_LIBS) $(GST_PLUGINS_BASE_LIBS) -lgstvideo-@GST_MAJORMINOR@
+libgstshapewipe_la_CFLAGS = $(GIO_CFLAGS) $(GST_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
+libgstshapewipe_la_LIBADD = $(GIO_LIBS) $(GST_LIBS) $(GST_PLUGINS_BASE_LIBS) -lgstvideo-@GST_MAJORMINOR@
libgstshapewipe_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstshapewipe_la_LIBTOOLFLAGS = --tag=disable-static
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v videotestsrc ! video/x-raw-yuv,format=(fourcc)AYUV,width=640,height=480 ! shapewipe position=0.5 name=shape ! videomixer name=mixer ! ffmpegcolorspace ! autovideosink filesrc location=mask.png ! typefind ! decodebin2 ! ffmpegcolorspace ! videoscale ! queue ! shape.mask_sink videotestsrc pattern=snow ! video/x-raw-yuv,format=(fourcc)AYUV,width=640,height=480 ! queue ! mixer.
+ * gst-launch -v videotestsrc ! video/x-raw,format=(string)AYUV,width=640,height=480 ! shapewipe position=0.5 name=shape ! videomixer name=mixer ! videoconvert ! autovideosink filesrc location=mask.png ! typefind ! decodebin2 ! videoconvert ! videoscale ! queue ! shape.mask_sink videotestsrc pattern=snow ! video/x-raw,format=(string)AYUV,width=640,height=480 ! queue ! mixer.
* ]| This pipeline adds the transition from mask.png with position 0.5 to an SMPTE test screen and snow.
* </refsect2>
*/
#include <string.h>
#include <gst/gst.h>
-#include <gst/controller/gstcontroller.h>
-#include <gst/glib-compat-private.h>
#include "gstshapewipe.h"
GstStateChange transition);
static GstFlowReturn gst_shape_wipe_video_sink_chain (GstPad * pad,
- GstBuffer * buffer);
+ GstObject * parent, GstBuffer * buffer);
static gboolean gst_shape_wipe_video_sink_event (GstPad * pad,
- GstEvent * event);
-static gboolean gst_shape_wipe_video_sink_setcaps (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static gboolean gst_shape_wipe_video_sink_setcaps (GstShapeWipe * self,
GstCaps * caps);
-static GstCaps *gst_shape_wipe_video_sink_getcaps (GstPad * pad);
-static GstFlowReturn gst_shape_wipe_video_sink_bufferalloc (GstPad * pad,
- guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf);
+static GstCaps *gst_shape_wipe_video_sink_getcaps (GstPad * pad,
+ GstCaps * filter);
static gboolean gst_shape_wipe_video_sink_query (GstPad * pad,
- GstQuery * query);
+ GstObject * parent, GstQuery * query);
static GstFlowReturn gst_shape_wipe_mask_sink_chain (GstPad * pad,
- GstBuffer * buffer);
-static gboolean gst_shape_wipe_mask_sink_event (GstPad * pad, GstEvent * event);
-static gboolean gst_shape_wipe_mask_sink_setcaps (GstPad * pad, GstCaps * caps);
-static GstCaps *gst_shape_wipe_mask_sink_getcaps (GstPad * pad);
-static gboolean gst_shape_wipe_src_event (GstPad * pad, GstEvent * event);
-static GstCaps *gst_shape_wipe_src_getcaps (GstPad * pad);
-static gboolean gst_shape_wipe_src_query (GstPad * pad, GstQuery * query);
+ GstObject * parent, GstBuffer * buffer);
+static gboolean gst_shape_wipe_mask_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static gboolean gst_shape_wipe_mask_sink_setcaps (GstShapeWipe * self,
+ GstCaps * caps);
+static GstCaps *gst_shape_wipe_mask_sink_getcaps (GstPad * pad,
+ GstCaps * filter);
+static gboolean gst_shape_wipe_mask_sink_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+static gboolean gst_shape_wipe_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstCaps *gst_shape_wipe_src_getcaps (GstPad * pad, GstCaps * filter);
+static gboolean gst_shape_wipe_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
enum
{
#define DEFAULT_BORDER 0.0
static GstStaticPadTemplate video_sink_pad_template =
- GST_STATIC_PAD_TEMPLATE ("video_sink",
+GST_STATIC_PAD_TEMPLATE ("video_sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") " ; " GST_VIDEO_CAPS_ARGB " ; "
- GST_VIDEO_CAPS_BGRA ";" GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA));
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }")));
static GstStaticPadTemplate mask_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("mask_sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("video/x-raw-gray, "
- "bpp = 8, "
- "depth = 8, "
+ GST_STATIC_CAPS ("video/x-raw, "
+ "format = (string) GRAY8, "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", " "framerate = 0/1 ; "
- "video/x-raw-gray, " "bpp = 16, " "depth = 16, "
- "endianness = BYTE_ORDER, " "width = " GST_VIDEO_SIZE_RANGE ", "
+ "video/x-raw, " "format = (string) " GST_VIDEO_NE (GRAY16) ", "
+ "width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", " "framerate = 0/1"));
static GstStaticPadTemplate src_pad_template =
- GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") " ; " GST_VIDEO_CAPS_ARGB " ; "
- GST_VIDEO_CAPS_BGRA ";" GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA));
+GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }")));
GST_DEBUG_CATEGORY_STATIC (gst_shape_wipe_debug);
#define GST_CAT_DEFAULT gst_shape_wipe_debug
-GST_BOILERPLATE (GstShapeWipe, gst_shape_wipe, GstElement, GST_TYPE_ELEMENT);
-
-static void
-gst_shape_wipe_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (gstelement_class,
- "Shape Wipe transition filter",
- "Filter/Editor/Video",
- "Adds a shape wipe transition to a video stream",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- gst_element_class_add_static_pad_template (gstelement_class,
- &video_sink_pad_template);
- gst_element_class_add_static_pad_template (gstelement_class,
- &mask_sink_pad_template);
- gst_element_class_add_static_pad_template (gstelement_class,
- &src_pad_template);
-}
+#define gst_shape_wipe_parent_class parent_class
+G_DEFINE_TYPE (GstShapeWipe, gst_shape_wipe, GST_TYPE_ELEMENT);
static void
gst_shape_wipe_class_init (GstShapeWipeClass * klass)
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_shape_wipe_change_state);
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "Shape Wipe transition filter",
+ "Filter/Editor/Video",
+ "Adds a shape wipe transition to a video stream",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&video_sink_pad_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&mask_sink_pad_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_pad_template));
}
static void
-gst_shape_wipe_init (GstShapeWipe * self, GstShapeWipeClass * g_class)
+gst_shape_wipe_init (GstShapeWipe * self)
{
self->video_sinkpad =
gst_pad_new_from_static_template (&video_sink_pad_template, "video_sink");
GST_DEBUG_FUNCPTR (gst_shape_wipe_video_sink_chain));
gst_pad_set_event_function (self->video_sinkpad,
GST_DEBUG_FUNCPTR (gst_shape_wipe_video_sink_event));
- gst_pad_set_setcaps_function (self->video_sinkpad,
- GST_DEBUG_FUNCPTR (gst_shape_wipe_video_sink_setcaps));
- gst_pad_set_getcaps_function (self->video_sinkpad,
- GST_DEBUG_FUNCPTR (gst_shape_wipe_video_sink_getcaps));
- gst_pad_set_bufferalloc_function (self->video_sinkpad,
- GST_DEBUG_FUNCPTR (gst_shape_wipe_video_sink_bufferalloc));
gst_pad_set_query_function (self->video_sinkpad,
GST_DEBUG_FUNCPTR (gst_shape_wipe_video_sink_query));
gst_element_add_pad (GST_ELEMENT (self), self->video_sinkpad);
GST_DEBUG_FUNCPTR (gst_shape_wipe_mask_sink_chain));
gst_pad_set_event_function (self->mask_sinkpad,
GST_DEBUG_FUNCPTR (gst_shape_wipe_mask_sink_event));
- gst_pad_set_setcaps_function (self->mask_sinkpad,
- GST_DEBUG_FUNCPTR (gst_shape_wipe_mask_sink_setcaps));
- gst_pad_set_getcaps_function (self->mask_sinkpad,
- GST_DEBUG_FUNCPTR (gst_shape_wipe_mask_sink_getcaps));
+ gst_pad_set_query_function (self->mask_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_shape_wipe_mask_sink_query));
gst_element_add_pad (GST_ELEMENT (self), self->mask_sinkpad);
self->srcpad = gst_pad_new_from_static_template (&src_pad_template, "src");
gst_pad_set_event_function (self->srcpad,
GST_DEBUG_FUNCPTR (gst_shape_wipe_src_event));
- gst_pad_set_getcaps_function (self->srcpad,
- GST_DEBUG_FUNCPTR (gst_shape_wipe_src_getcaps));
gst_pad_set_query_function (self->srcpad,
GST_DEBUG_FUNCPTR (gst_shape_wipe_src_query));
gst_element_add_pad (GST_ELEMENT (self), self->srcpad);
- self->mask_mutex = g_mutex_new ();
- self->mask_cond = g_cond_new ();
+ g_mutex_init (&self->mask_mutex);
+ g_cond_init (&self->mask_cond);
gst_shape_wipe_reset (self);
}
gst_shape_wipe_reset (self);
- if (self->mask_cond)
- g_cond_free (self->mask_cond);
- self->mask_cond = NULL;
-
- if (self->mask_mutex)
- g_mutex_free (self->mask_mutex);
- self->mask_mutex = NULL;
+ g_cond_clear (&self->mask_cond);
+ g_mutex_clear (&self->mask_mutex);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
gst_buffer_unref (self->mask);
self->mask = NULL;
- g_mutex_lock (self->mask_mutex);
- g_cond_signal (self->mask_cond);
- g_mutex_unlock (self->mask_mutex);
+ g_mutex_lock (&self->mask_mutex);
+ g_cond_signal (&self->mask_cond);
+ g_mutex_unlock (&self->mask_mutex);
- self->fmt = GST_VIDEO_FORMAT_UNKNOWN;
- self->width = self->height = 0;
+ gst_video_info_init (&self->info);
self->mask_bpp = 0;
gst_segment_init (&self->segment, GST_FORMAT_TIME);
self->frame_duration = 0;
}
-static GstFlowReturn
-gst_shape_wipe_video_sink_bufferalloc (GstPad * pad, guint64 offset, guint size,
- GstCaps * caps, GstBuffer ** buf)
-{
- GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
- GstFlowReturn ret = GST_FLOW_OK;
-
- GST_LOG_OBJECT (pad, "Allocating buffer with offset 0x%" G_GINT64_MODIFIER
- "x and size %u with caps: %" GST_PTR_FORMAT, offset, size, caps);
-
- *buf = NULL;
-
- ret = gst_pad_alloc_buffer (self->srcpad, offset, size, caps, buf);
- if (G_UNLIKELY (ret != GST_FLOW_OK))
- GST_ERROR_OBJECT (pad, "Allocating buffer failed: %s",
- gst_flow_get_name (ret));
-
- gst_object_unref (self);
-
- return ret;
-}
-
static gboolean
-gst_shape_wipe_video_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_shape_wipe_video_sink_setcaps (GstShapeWipe * self, GstCaps * caps)
{
- GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
gboolean ret = TRUE;
- GstStructure *s;
- GstVideoFormat fmt;
- gint width, height;
- gint fps_n, fps_d;
-
- GST_DEBUG_OBJECT (pad, "Setting caps: %" GST_PTR_FORMAT, caps);
-
- s = gst_caps_get_structure (caps, 0);
+ GstVideoInfo info;
- if (!gst_video_format_parse_caps (caps, &fmt, &width, &height) ||
- !gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d)) {
- GST_ERROR_OBJECT (pad, "Invalid caps");
- ret = FALSE;
- goto done;
- }
+ GST_DEBUG_OBJECT (self, "Setting caps: %" GST_PTR_FORMAT, caps);
- self->fmt = fmt;
- if (self->width != width || self->height != height) {
- g_mutex_lock (self->mask_mutex);
- self->width = width;
- self->height = height;
+ if (!gst_video_info_from_caps (&info, caps))
+ goto invalid_caps;
+ if (self->info.width != info.width || self->info.height != info.height) {
+ g_mutex_lock (&self->mask_mutex);
+ self->info = info;
if (self->mask)
gst_buffer_unref (self->mask);
self->mask = NULL;
- g_mutex_unlock (self->mask_mutex);
+ g_mutex_unlock (&self->mask_mutex);
}
- if (fps_n != 0)
- self->frame_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
+
+ if (info.fps_n != 0)
+ self->frame_duration =
+ gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
else
self->frame_duration = 0;
ret = gst_pad_set_caps (self->srcpad, caps);
-done:
- gst_object_unref (self);
-
return ret;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_ERROR_OBJECT (self, "Invalid caps");
+ return FALSE;
+ }
}
static GstCaps *
-gst_shape_wipe_video_sink_getcaps (GstPad * pad)
+gst_shape_wipe_video_sink_getcaps (GstPad * pad, GstCaps * filter)
{
GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
GstCaps *ret, *tmp;
- if (GST_PAD_CAPS (pad))
- return gst_caps_copy (GST_PAD_CAPS (pad));
+ if (gst_pad_has_current_caps (pad))
+ return gst_pad_get_current_caps (pad);
- tmp = gst_pad_peer_get_caps (self->srcpad);
+ tmp = gst_pad_peer_query_caps (self->srcpad, NULL);
if (tmp) {
ret = gst_caps_intersect (tmp, gst_pad_get_pad_template_caps (pad));
gst_caps_unref (tmp);
if (gst_caps_is_empty (ret))
goto done;
- tmp = gst_pad_peer_get_caps (pad);
+ tmp = gst_pad_peer_query_caps (pad, NULL);
GST_LOG_OBJECT (pad, "peerpad accepted caps: %" GST_PTR_FORMAT, tmp);
if (tmp) {
if (gst_caps_is_empty (ret))
goto done;
- if (self->height && self->width) {
+ if (self->info.height && self->info.width) {
guint i, n;
n = gst_caps_get_size (ret);
for (i = 0; i < n; i++) {
GstStructure *s = gst_caps_get_structure (ret, i);
- gst_structure_set (s, "width", G_TYPE_INT, self->width, "height",
- G_TYPE_INT, self->height, NULL);
+ gst_structure_set (s, "width", G_TYPE_INT, self->info.width, "height",
+ G_TYPE_INT, self->info.height, NULL);
}
}
- tmp = gst_pad_peer_get_caps (self->mask_sinkpad);
+ tmp = gst_pad_peer_query_caps (self->mask_sinkpad, NULL);
GST_LOG_OBJECT (pad, "mask accepted caps: %" GST_PTR_FORMAT, tmp);
if (tmp) {
n = gst_caps_get_size (tmp);
- tmp2 = gst_caps_new_empty ();
for (i = 0; i < n; i++) {
GstStructure *s = gst_caps_get_structure (tmp, i);
- GstStructure *c;
-
- gst_structure_remove_fields (s, "format", "bpp", "depth", "endianness",
- "framerate", "red_mask", "green_mask", "blue_mask", "alpha_mask",
- NULL);
- gst_structure_set_name (s, "video/x-raw-yuv");
- c = gst_structure_copy (s);
- gst_structure_set_name (c, "video/x-raw-rgb");
- gst_caps_append_structure (tmp2, c);
+
+ gst_structure_remove_fields (s, "format", "framerate", NULL);
+ gst_structure_set_name (s, "video/x-raw");
}
- gst_caps_append (tmp, tmp2);
intersection = gst_caps_intersect (tmp, ret);
gst_caps_unref (tmp);
}
static gboolean
-gst_shape_wipe_mask_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_shape_wipe_mask_sink_setcaps (GstShapeWipe * self, GstCaps * caps)
{
- GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
gboolean ret = TRUE;
GstStructure *s;
gint width, height, bpp;
- GST_DEBUG_OBJECT (pad, "Setting caps: %" GST_PTR_FORMAT, caps);
+ GST_DEBUG_OBJECT (self, "Setting caps: %" GST_PTR_FORMAT, caps);
s = gst_caps_get_structure (caps, 0);
goto done;
}
- if ((self->width != width || self->height != height) &&
- self->width > 0 && self->height > 0) {
- GST_ERROR_OBJECT (pad, "Mask caps must have the same width/height "
+ if ((self->info.width != width || self->info.height != height) &&
+ self->info.width > 0 && self->info.height > 0) {
+ GST_ERROR_OBJECT (self, "Mask caps must have the same width/height "
"as the video caps");
ret = FALSE;
goto done;
} else {
- self->width = width;
- self->height = height;
+ self->info.width = width;
+ self->info.height = height;
}
self->mask_bpp = bpp;
done:
- gst_object_unref (self);
-
return ret;
}
static GstCaps *
-gst_shape_wipe_mask_sink_getcaps (GstPad * pad)
+gst_shape_wipe_mask_sink_getcaps (GstPad * pad, GstCaps * filter)
{
GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
GstCaps *ret, *tmp;
guint i, n;
- if (GST_PAD_CAPS (pad))
- return gst_caps_copy (GST_PAD_CAPS (pad));
+ if (gst_pad_has_current_caps (pad))
+ return gst_pad_get_current_caps (pad);
- tmp = gst_pad_peer_get_caps (self->video_sinkpad);
+ tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
if (tmp) {
ret =
gst_caps_intersect (tmp,
if (gst_caps_is_empty (ret))
goto done;
- tmp = gst_pad_peer_get_caps (self->srcpad);
+ tmp = gst_pad_peer_query_caps (self->srcpad, NULL);
GST_LOG_OBJECT (pad, "srcpad accepted caps: %" GST_PTR_FORMAT, ret);
if (tmp) {
GstStructure *s = gst_caps_get_structure (ret, i);
GstStructure *t;
- gst_structure_set_name (s, "video/x-raw-gray");
- gst_structure_remove_fields (s, "format", "framerate", "bpp", "depth",
- "endianness", "framerate", "red_mask", "green_mask", "blue_mask",
- "alpha_mask", NULL);
+ gst_structure_set_name (s, "video/x-raw");
+ gst_structure_remove_fields (s, "format", "framerate", NULL);
- if (self->width && self->height)
- gst_structure_set (s, "width", G_TYPE_INT, self->width, "height",
- G_TYPE_INT, self->height, NULL);
+ if (self->info.width && self->info.height)
+ gst_structure_set (s, "width", G_TYPE_INT, self->info.width, "height",
+ G_TYPE_INT, self->info.height, NULL);
gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
t = gst_structure_copy (s);
- gst_structure_set (s, "bpp", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
- gst_structure_set (t, "bpp", G_TYPE_INT, 8, "depth", G_TYPE_INT, 8, NULL);
+ gst_structure_set (s, "format", G_TYPE_STRING, GST_VIDEO_NE (GRAY16), NULL);
+ gst_structure_set (t, "format", G_TYPE_STRING, "GRAY8", NULL);
gst_caps_append_structure (tmp, t);
}
gst_caps_append (ret, tmp);
- tmp = gst_pad_peer_get_caps (pad);
+ tmp = gst_pad_peer_query_caps (pad, NULL);
GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, tmp);
if (tmp) {
}
static GstCaps *
-gst_shape_wipe_src_getcaps (GstPad * pad)
+gst_shape_wipe_src_getcaps (GstPad * pad, GstCaps * filter)
{
GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
GstCaps *ret, *tmp;
- if (GST_PAD_CAPS (pad))
- return gst_caps_copy (GST_PAD_CAPS (pad));
- else if (GST_PAD_CAPS (self->video_sinkpad))
- return gst_caps_copy (GST_PAD_CAPS (self->video_sinkpad));
+ if (gst_pad_has_current_caps (pad))
+ return gst_pad_get_current_caps (pad);
+ else if (gst_pad_has_current_caps (self->video_sinkpad))
+ return gst_pad_get_current_caps (self->video_sinkpad);
- tmp = gst_pad_peer_get_caps (self->video_sinkpad);
+ tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
if (tmp) {
ret =
gst_caps_intersect (tmp,
if (gst_caps_is_empty (ret))
goto done;
- tmp = gst_pad_peer_get_caps (pad);
+ tmp = gst_pad_peer_query_caps (pad, NULL);
GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, ret);
if (tmp) {
GstCaps *intersection;
if (gst_caps_is_empty (ret))
goto done;
- if (self->height && self->width) {
+ if (self->info.height && self->info.width) {
guint i, n;
n = gst_caps_get_size (ret);
for (i = 0; i < n; i++) {
GstStructure *s = gst_caps_get_structure (ret, i);
- gst_structure_set (s, "width", G_TYPE_INT, self->width, "height",
- G_TYPE_INT, self->height, NULL);
+ gst_structure_set (s, "width", G_TYPE_INT, self->info.width, "height",
+ G_TYPE_INT, self->info.height, NULL);
}
}
- tmp = gst_pad_peer_get_caps (self->mask_sinkpad);
+ tmp = gst_pad_peer_query_caps (self->mask_sinkpad, NULL);
GST_LOG_OBJECT (pad, "mask sink accepted caps: %" GST_PTR_FORMAT, ret);
if (tmp) {
GstCaps *intersection, *tmp2;
tmp = intersection;
n = gst_caps_get_size (tmp);
- tmp2 = gst_caps_new_empty ();
for (i = 0; i < n; i++) {
GstStructure *s = gst_caps_get_structure (tmp, i);
- GstStructure *c;
- gst_structure_remove_fields (s, "format", "bpp", "depth", "endianness",
- "framerate", "red_mask", "green_mask", "blue_mask", "alpha_mask",
- NULL);
- gst_structure_set_name (s, "video/x-raw-yuv");
- c = gst_structure_copy (s);
-
- gst_caps_append_structure (tmp2, c);
+ gst_structure_remove_fields (s, "format", "framerate", NULL);
+ gst_structure_set_name (s, "video/x-raw");
}
- gst_caps_append (tmp, tmp2);
intersection = gst_caps_intersect (tmp, ret);
gst_caps_unref (tmp);
}
static gboolean
-gst_shape_wipe_video_sink_query (GstPad * pad, GstQuery * query)
+gst_shape_wipe_video_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
- GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
gboolean ret;
- GstPad *peer = gst_pad_get_peer (self->srcpad);
GST_LOG_OBJECT (pad, "Handling query of type '%s'",
gst_query_type_get_name (GST_QUERY_TYPE (query)));
- if (!peer) {
- GST_INFO_OBJECT (pad, "No peer yet, dropping query");
- ret = FALSE;
- } else {
- ret = gst_pad_query (peer, query);
- gst_object_unref (peer);
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_shape_wipe_video_sink_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_peer_query (self->srcpad, query);
+ break;
}
- gst_object_unref (self);
return ret;
}
static gboolean
-gst_shape_wipe_src_query (GstPad * pad, GstQuery * query)
+gst_shape_wipe_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
- GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
gboolean ret;
- GstPad *peer = gst_pad_get_peer (self->video_sinkpad);
GST_LOG_OBJECT (pad, "Handling query of type '%s'",
gst_query_type_get_name (GST_QUERY_TYPE (query)));
- if (!peer) {
- GST_INFO_OBJECT (pad, "No peer yet, dropping query");
- ret = FALSE;
- } else {
- ret = gst_pad_query (peer, query);
- gst_object_unref (peer);
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_shape_wipe_src_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_peer_query (self->video_sinkpad, query);
+ break;
}
- gst_object_unref (self);
return ret;
}
#define CREATE_ARGB_FUNCTIONS(depth, name, shift, a, r, g, b) \
static void \
-gst_shape_wipe_blend_##name##_##depth (GstShapeWipe * self, GstBuffer * inbuf, \
- GstBuffer * maskbuf, GstBuffer * outbuf) \
+gst_shape_wipe_blend_##name##_##depth (GstShapeWipe * self, GstVideoFrame * inframe, \
+ GstVideoFrame * maskframe, GstVideoFrame * outframe) \
{ \
- const guint##depth *mask = (const guint##depth *) GST_BUFFER_DATA (maskbuf); \
- const guint8 *input = (const guint8 *) GST_BUFFER_DATA (inbuf); \
- guint8 *output = (guint8 *) GST_BUFFER_DATA (outbuf); \
+ const guint##depth *mask = (const guint##depth *) GST_VIDEO_FRAME_PLANE_DATA (maskframe, 0); \
+ const guint8 *input = (const guint8 *) GST_VIDEO_FRAME_PLANE_DATA (inframe, 0); \
+ guint8 *output = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (outframe, 0); \
guint i, j; \
- guint mask_increment = ((depth == 16) ? GST_ROUND_UP_2 (self->width) : \
- GST_ROUND_UP_4 (self->width)) - self->width; \
+ guint mask_increment = ((depth == 16) ? GST_ROUND_UP_2 (self->info.width) : \
+ GST_ROUND_UP_4 (self->info.width)) - self->info.width; \
gfloat position = self->mask_position; \
gfloat low = position - (self->mask_border / 2.0f); \
gfloat high = position + (self->mask_border / 2.0f); \
guint32 low_i, high_i, round_i; \
- gint width = self->width, height = self->height; \
+ gint width = self->info.width, height = self->info.height; \
\
if (low < 0.0f) { \
high = 0.0f; \
CREATE_ARGB_FUNCTIONS (8, bgra, 8, 3, 2, 1, 0);
static GstFlowReturn
-gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
+gst_shape_wipe_video_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
{
- GstShapeWipe *self = GST_SHAPE_WIPE (GST_PAD_PARENT (pad));
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
GstFlowReturn ret = GST_FLOW_OK;
GstBuffer *mask = NULL, *outbuf = NULL;
GstClockTime timestamp;
gboolean new_outbuf = FALSE;
+ GstVideoFrame inframe, outframe, maskframe;
- if (G_UNLIKELY (self->fmt == GST_VIDEO_FORMAT_UNKNOWN))
+ if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&self->info) ==
+ GST_VIDEO_FORMAT_UNKNOWN))
goto not_negotiated;
timestamp = GST_BUFFER_TIMESTAMP (buffer);
gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);
if (GST_CLOCK_TIME_IS_VALID (timestamp))
- gst_object_sync_values (G_OBJECT (self), timestamp);
+ gst_object_sync_values (GST_OBJECT (self), timestamp);
GST_LOG_OBJECT (self,
"Blending buffer with timestamp %" GST_TIME_FORMAT " at position %f",
GST_TIME_ARGS (timestamp), self->mask_position);
- g_mutex_lock (self->mask_mutex);
+ g_mutex_lock (&self->mask_mutex);
if (self->shutdown)
goto shutdown;
if (!self->mask)
- g_cond_wait (self->mask_cond, self->mask_mutex);
+ g_cond_wait (&self->mask_cond, &self->mask_mutex);
if (self->mask == NULL || self->shutdown) {
goto shutdown;
} else {
mask = gst_buffer_ref (self->mask);
}
- g_mutex_unlock (self->mask_mutex);
+ g_mutex_unlock (&self->mask_mutex);
if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer)))
goto qos;
/* Try to blend inplace, if it's not possible
* get a new buffer from downstream. */
if (!gst_buffer_is_writable (buffer)) {
- ret =
- gst_pad_alloc_buffer_and_set_caps (self->srcpad, GST_BUFFER_OFFSET_NONE,
- GST_BUFFER_SIZE (buffer), GST_PAD_CAPS (self->srcpad), &outbuf);
- if (G_UNLIKELY (ret != GST_FLOW_OK))
- goto alloc_failed;
-
- gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_ALL);
+ outbuf = gst_buffer_new_allocate (NULL, gst_buffer_get_size (buffer), 0);
+ gst_buffer_copy_into (outbuf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
new_outbuf = TRUE;
} else {
outbuf = buffer;
}
- switch (self->fmt) {
+ gst_video_frame_map (&inframe, &self->info, buffer, GST_MAP_READ);
+ gst_video_frame_map (&maskframe, &self->info, mask, GST_MAP_READ);
+ gst_video_frame_map (&outframe, &self->info, outbuf, GST_MAP_WRITE);
+
+ switch (GST_VIDEO_INFO_FORMAT (&self->info)) {
case GST_VIDEO_FORMAT_AYUV:
case GST_VIDEO_FORMAT_ARGB:
case GST_VIDEO_FORMAT_ABGR:
if (self->mask_bpp == 16)
- gst_shape_wipe_blend_argb_16 (self, buffer, mask, outbuf);
+ gst_shape_wipe_blend_argb_16 (self, &inframe, &maskframe, &outframe);
else
- gst_shape_wipe_blend_argb_8 (self, buffer, mask, outbuf);
+ gst_shape_wipe_blend_argb_8 (self, &inframe, &maskframe, &outframe);
break;
case GST_VIDEO_FORMAT_BGRA:
case GST_VIDEO_FORMAT_RGBA:
if (self->mask_bpp == 16)
- gst_shape_wipe_blend_bgra_16 (self, buffer, mask, outbuf);
+ gst_shape_wipe_blend_bgra_16 (self, &inframe, &maskframe, &outframe);
else
- gst_shape_wipe_blend_bgra_8 (self, buffer, mask, outbuf);
+ gst_shape_wipe_blend_bgra_8 (self, &inframe, &maskframe, &outframe);
break;
default:
g_assert_not_reached ();
break;
}
+ gst_video_frame_unmap (&outframe);
+ gst_video_frame_unmap (&maskframe);
+ gst_video_frame_unmap (&inframe);
+
gst_buffer_unref (mask);
if (new_outbuf)
gst_buffer_unref (buffer);
/* Errors */
not_negotiated:
- GST_ERROR_OBJECT (self, "No valid caps yet");
- gst_buffer_unref (buffer);
- return GST_FLOW_NOT_NEGOTIATED;
+ {
+ GST_ERROR_OBJECT (self, "No valid caps yet");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
shutdown:
- GST_DEBUG_OBJECT (self, "Shutting down");
- gst_buffer_unref (buffer);
- return GST_FLOW_WRONG_STATE;
+ {
+ GST_DEBUG_OBJECT (self, "Shutting down");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_FLUSHING;
+ }
qos:
- GST_DEBUG_OBJECT (self, "Dropping buffer because of QoS");
- gst_buffer_unref (buffer);
- gst_buffer_unref (mask);
- return GST_FLOW_OK;
-alloc_failed:
- GST_ERROR_OBJECT (self, "Buffer allocation from downstream failed: %s",
- gst_flow_get_name (ret));
- gst_buffer_unref (buffer);
- gst_buffer_unref (mask);
- return ret;
+ {
+ GST_DEBUG_OBJECT (self, "Dropping buffer because of QoS");
+ gst_buffer_unref (buffer);
+ gst_buffer_unref (mask);
+ return GST_FLOW_OK;
+ }
push_failed:
- GST_ERROR_OBJECT (self, "Pushing buffer downstream failed: %s",
- gst_flow_get_name (ret));
- return ret;
+ {
+ GST_ERROR_OBJECT (self, "Pushing buffer downstream failed: %s",
+ gst_flow_get_name (ret));
+ return ret;
+ }
}
static GstFlowReturn
-gst_shape_wipe_mask_sink_chain (GstPad * pad, GstBuffer * buffer)
+gst_shape_wipe_mask_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
{
- GstShapeWipe *self = GST_SHAPE_WIPE (GST_PAD_PARENT (pad));
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
GstFlowReturn ret = GST_FLOW_OK;
- g_mutex_lock (self->mask_mutex);
+ g_mutex_lock (&self->mask_mutex);
GST_DEBUG_OBJECT (self, "Setting new mask buffer: %" GST_PTR_FORMAT, buffer);
gst_buffer_replace (&self->mask, buffer);
- g_cond_signal (self->mask_cond);
- g_mutex_unlock (self->mask_mutex);
+ g_cond_signal (&self->mask_cond);
+ g_mutex_unlock (&self->mask_mutex);
gst_buffer_unref (buffer);
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
/* Unblock video sink chain function */
- g_mutex_lock (self->mask_mutex);
+ g_mutex_lock (&self->mask_mutex);
self->shutdown = TRUE;
- g_cond_signal (self->mask_cond);
- g_mutex_unlock (self->mask_mutex);
+ g_cond_signal (&self->mask_cond);
+ g_mutex_unlock (&self->mask_mutex);
break;
default:
break;
}
static gboolean
-gst_shape_wipe_video_sink_event (GstPad * pad, GstEvent * event)
+gst_shape_wipe_video_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
- GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
gboolean ret;
GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:{
- GstFormat fmt;
- gboolean is_update;
- gint64 start, end, base;
- gdouble rate;
-
- gst_event_parse_new_segment (event, &is_update, &rate, &fmt, &start,
- &end, &base);
- if (fmt == GST_FORMAT_TIME) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ ret = gst_shape_wipe_video_sink_setcaps (self, caps);
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ GstSegment seg;
+
+ gst_event_copy_segment (event, &seg);
+ if (seg.format == GST_FORMAT_TIME) {
GST_DEBUG_OBJECT (pad,
- "Got NEWSEGMENT event in GST_FORMAT_TIME, passing on (%"
- GST_TIME_FORMAT " - %" GST_TIME_FORMAT ")", GST_TIME_ARGS (start),
- GST_TIME_ARGS (end));
- gst_segment_set_newsegment (&self->segment, is_update, rate, fmt, start,
- end, base);
+ "Got SEGMENT event in GST_FORMAT_TIME %" GST_PTR_FORMAT, &seg);
+ self->segment = seg;
} else {
gst_segment_init (&self->segment, GST_FORMAT_TIME);
}
break;
}
- gst_object_unref (self);
return ret;
}
static gboolean
-gst_shape_wipe_mask_sink_event (GstPad * pad, GstEvent * event)
+gst_shape_wipe_mask_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
{
- GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ gst_shape_wipe_mask_sink_setcaps (self, caps);
+ break;
+ }
case GST_EVENT_FLUSH_STOP:
- g_mutex_lock (self->mask_mutex);
+ g_mutex_lock (&self->mask_mutex);
gst_buffer_replace (&self->mask, NULL);
- g_mutex_unlock (self->mask_mutex);
+ g_mutex_unlock (&self->mask_mutex);
break;
default:
break;
/* Dropping all events here */
gst_event_unref (event);
- gst_object_unref (self);
return TRUE;
}
static gboolean
-gst_shape_wipe_src_event (GstPad * pad, GstEvent * event)
+gst_shape_wipe_mask_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
{
- GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
+ gboolean ret;
+
+ GST_LOG_OBJECT (pad, "Handling query of type '%s'",
+ gst_query_type_get_name (GST_QUERY_TYPE (query)));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_shape_wipe_mask_sink_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return ret;
+}
+
+
+static gboolean
+gst_shape_wipe_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
gboolean ret;
GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_QOS:{
+ GstQOSType type;
GstClockTimeDiff diff;
GstClockTime timestamp;
gdouble proportion;
- gst_event_parse_qos (event, &proportion, &diff, ×tamp);
+ gst_event_parse_qos (event, &type, &proportion, &diff, ×tamp);
gst_shape_wipe_update_qos (self, proportion, diff, timestamp);
}
break;
}
- gst_object_unref (self);
return ret;
}
GST_DEBUG_CATEGORY_INIT (gst_shape_wipe_debug, "shapewipe", 0,
"shapewipe element");
- gst_controller_init (NULL, NULL);
-
if (!gst_element_register (plugin, "shapewipe", GST_RANK_NONE,
GST_TYPE_SHAPE_WIPE))
return FALSE;
GstBuffer *mask;
gfloat mask_position;
gfloat mask_border;
- GMutex *mask_mutex;
- GCond *mask_cond;
+ GMutex mask_mutex;
+ GCond mask_cond;
gint mask_bpp;
- GstVideoFormat fmt;
- gint width, height;
+ GstVideoInfo info;
gboolean shutdown;
noinst_HEADERS = gstsmpte.h gstmask.h paint.h gstsmptealpha.h
-libgstsmpte_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(GST_CONTROLLER_CFLAGS)
+libgstsmpte_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
libgstsmpte_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(LIBM) \
- -lgstvideo-$(GST_MAJORMINOR) \
- $(GST_CONTROLLER_LIBS)
+ -lgstvideo-$(GST_MAJORMINOR)
libgstsmpte_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstsmpte_la_LIBTOOLFLAGS = --tag=disable-static
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class,
- &gst_smpte_sink1_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_smpte_sink2_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_smpte_src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_smpte_sink1_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_smpte_sink2_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_smpte_src_template));
gst_element_class_set_details_simple (element_class, "SMPTE transitions",
"Filter/Editor/Video",
"Apply the standard SMPTE transitions on video images",
#endif
#include <string.h>
-#include <gst/controller/gstcontroller.h>
-
#include "gstsmptealpha.h"
#include "paint.h"
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class,
- &gst_smpte_alpha_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_smpte_alpha_src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_smpte_alpha_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_smpte_alpha_src_template));
gst_element_class_set_details_simple (element_class, "SMPTE transitions",
"Filter/Editor/Video",
"Apply the standard SMPTE transitions as alpha on video images",
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (smpte), stream_time);
+ gst_object_sync_values (GST_OBJECT (smpte), stream_time);
}
static GstFlowReturn
GST_DEBUG_CATEGORY_INIT (gst_smpte_alpha_debug, "smptealpha", 0,
"SMPTE alpha effect");
- /* initialize gst controller library */
- gst_controller_init (NULL, NULL);
-
return gst_element_register (plugin, "smptealpha", GST_RANK_NONE,
GST_TYPE_SMPTE_ALPHA);
}
#define GST_CAT_DEFAULT gst_spectrum_debug
/* elementfactory information */
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+# define FORMATS "{ S16LE, S24LE, S32LE, F32LE, F64LE }"
+#else
+# define FORMATS "{ S16BE, S24BE, S32BE, F32BE, F64BE }"
+#endif
#define ALLOWED_CAPS \
- "audio/x-raw-int, " \
- " width = (int) 16, " \
- " depth = (int) [ 1, 16 ], " \
- " signed = (boolean) true, " \
- " endianness = (int) BYTE_ORDER, " \
- " rate = (int) [ 1, MAX ], " \
- " channels = (int) [ 1, MAX ]; " \
- "audio/x-raw-int, " \
- " width = (int) 24, " \
- " depth = (int) [ 1, 24 ], " \
- " signed = (boolean) true, " \
- " endianness = (int) BYTE_ORDER, " \
- " rate = (int) [ 1, MAX ], " \
- " channels = (int) [ 1, MAX ]; " \
- "audio/x-raw-int, " \
- " width = (int) 32, " \
- " depth = (int) [ 1, 32 ], " \
- " signed = (boolean) true, " \
- " endianness = (int) BYTE_ORDER, " \
- " rate = (int) [ 1, MAX ], " \
- " channels = (int) [ 1, MAX ]; " \
- "audio/x-raw-float, " \
- " width = (int) { 32, 64 }, " \
- " endianness = (int) BYTE_ORDER, " \
- " rate = (int) [ 1, MAX ], " \
- " channels = (int) [ 1, MAX ]"
+ GST_AUDIO_CAPS_MAKE (FORMATS) ", " \
+ "layout = (string) interleaved"
/* Spectrum properties */
-#define DEFAULT_MESSAGE TRUE
-#define DEFAULT_POST_MESSAGES TRUE
+#define DEFAULT_POST_MESSAGES TRUE
#define DEFAULT_MESSAGE_MAGNITUDE TRUE
#define DEFAULT_MESSAGE_PHASE FALSE
#define DEFAULT_INTERVAL (GST_SECOND / 10)
enum
{
PROP_0,
- PROP_MESSAGE,
PROP_POST_MESSAGES,
PROP_MESSAGE_MAGNITUDE,
PROP_MESSAGE_PHASE,
PROP_MULTI_CHANNEL
};
-GST_BOILERPLATE (GstSpectrum, gst_spectrum, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER);
+#define gst_spectrum_parent_class parent_class
+G_DEFINE_TYPE (GstSpectrum, gst_spectrum, GST_TYPE_AUDIO_FILTER);
static void gst_spectrum_finalize (GObject * object);
static void gst_spectrum_set_property (GObject * object, guint prop_id,
static GstFlowReturn gst_spectrum_transform_ip (GstBaseTransform * trans,
GstBuffer * in);
static gboolean gst_spectrum_setup (GstAudioFilter * base,
- GstRingBufferSpec * format);
-
-static void
-gst_spectrum_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Spectrum analyzer",
- "Filter/Analyzer/Audio",
- "Run an FFT on the audio signal, output spectrum data",
- "Erik Walthinsen <omega@cse.ogi.edu>, "
- "Stefan Kost <ensonic@users.sf.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (g_class),
- caps);
- gst_caps_unref (caps);
-}
+ const GstAudioInfo * info);
static void
gst_spectrum_class_init (GstSpectrumClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
GstAudioFilterClass *filter_class = GST_AUDIO_FILTER_CLASS (klass);
+ GstCaps *caps;
gobject_class->set_property = gst_spectrum_set_property;
gobject_class->get_property = gst_spectrum_get_property;
filter_class->setup = GST_DEBUG_FUNCPTR (gst_spectrum_setup);
- /* FIXME 0.11, remove in favour of post-messages */
- g_object_class_install_property (gobject_class, PROP_MESSAGE,
- g_param_spec_boolean ("message", "Message",
- "Whether to post a 'spectrum' element message on the bus for each "
- "passed interval (deprecated, use post-messages)", DEFAULT_MESSAGE,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
* GstSpectrum:post-messages
*
GST_DEBUG_CATEGORY_INIT (gst_spectrum_debug, "spectrum", 0,
"audio spectrum analyser element");
+
+ gst_element_class_set_details_simple (element_class, "Spectrum analyzer",
+ "Filter/Analyzer/Audio",
+ "Run an FFT on the audio signal, output spectrum data",
+ "Erik Walthinsen <omega@cse.ogi.edu>, "
+ "Stefan Kost <ensonic@users.sf.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (filter_class, caps);
+ gst_caps_unref (caps);
}
static void
-gst_spectrum_init (GstSpectrum * spectrum, GstSpectrumClass * g_class)
+gst_spectrum_init (GstSpectrum * spectrum)
{
spectrum->post_messages = DEFAULT_POST_MESSAGES;
spectrum->message_magnitude = DEFAULT_MESSAGE_MAGNITUDE;
g_assert (spectrum->channel_data == NULL);
spectrum->num_channels = (spectrum->multi_channel) ?
- GST_AUDIO_FILTER (spectrum)->format.channels : 1;
+ GST_AUDIO_FILTER_CHANNELS (spectrum) : 1;
GST_DEBUG_OBJECT (spectrum, "allocating data for %d channels",
spectrum->num_channels);
GstSpectrum *filter = GST_SPECTRUM (object);
switch (prop_id) {
- case PROP_MESSAGE:
case PROP_POST_MESSAGES:
filter->post_messages = g_value_get_boolean (value);
break;
GstSpectrum *filter = GST_SPECTRUM (object);
switch (prop_id) {
- case PROP_MESSAGE:
case PROP_POST_MESSAGES:
g_value_set_boolean (value, filter->post_messages);
break;
}
static void
-input_data_mixed_int32 (const guint8 * _in, gfloat * out, guint len,
- guint channels, gfloat max_value, guint op, guint nfft)
-{
- guint i, j, ip = 0;
- gint32 *in = (gint32 *) _in;
- gfloat v;
-
- for (j = 0; j < len; j++) {
- v = in[ip++] * 2 + 1;
- for (i = 1; i < channels; i++)
- v += in[ip++] * 2 + 1;
- out[op] = v / channels;
- op = (op + 1) % nfft;
- }
-}
-
-static void
input_data_mixed_int32_max (const guint8 * _in, gfloat * out, guint len,
guint channels, gfloat max_value, guint op, guint nfft)
{
}
static void
-input_data_mixed_int24 (const guint8 * _in, gfloat * out, guint len,
- guint channels, gfloat max_value, guint op, guint nfft)
-{
- guint i, j;
- gfloat v = 0.0;
-
- for (j = 0; j < len; j++) {
- for (i = 0; i < channels; i++) {
-#if G_BYTE_ORDER == G_BIG_ENDIAN
- gint32 value = GST_READ_UINT24_BE (_in);
-#else
- gint32 value = GST_READ_UINT24_LE (_in);
-#endif
- if (value & 0x00800000)
- value |= 0xff000000;
- v += value * 2 + 1;
- _in += 3;
- }
- out[op] = v / channels;
- op = (op + 1) % nfft;
- }
-}
-
-static void
input_data_mixed_int24_max (const guint8 * _in, gfloat * out, guint len,
guint channels, gfloat max_value, guint op, guint nfft)
{
}
static void
-input_data_mixed_int16 (const guint8 * _in, gfloat * out, guint len,
- guint channels, gfloat max_value, guint op, guint nfft)
-{
- guint i, j, ip = 0;
- gint16 *in = (gint16 *) _in;
- gfloat v;
-
- for (j = 0; j < len; j++) {
- v = in[ip++] * 2 + 1;
- for (i = 1; i < channels; i++)
- v += in[ip++] * 2 + 1;
- out[op] = v / channels;
- op = (op + 1) % nfft;
- }
-}
-
-static void
input_data_mixed_int16_max (const guint8 * _in, gfloat * out, guint len,
guint channels, gfloat max_value, guint op, guint nfft)
{
}
static void
-input_data_int32 (const guint8 * _in, gfloat * out, guint len, guint channels,
- gfloat max_value, guint op, guint nfft)
-{
- guint j, ip;
- gint32 *in = (gint32 *) _in;
-
- for (j = 0, ip = 0; j < len; j++, ip += channels) {
- out[op] = in[ip] * 2 + 1;
- op = (op + 1) % nfft;
- }
-}
-
-static void
input_data_int32_max (const guint8 * _in, gfloat * out, guint len,
guint channels, gfloat max_value, guint op, guint nfft)
{
}
static void
-input_data_int24 (const guint8 * _in, gfloat * out, guint len, guint channels,
- gfloat max_value, guint op, guint nfft)
-{
- guint j;
-
- for (j = 0; j < len; j++) {
-#if G_BYTE_ORDER == G_BIG_ENDIAN
- gint32 v = GST_READ_UINT24_BE (_in);
-#else
- gint32 v = GST_READ_UINT24_LE (_in);
-#endif
- if (v & 0x00800000)
- v |= 0xff000000;
- _in += 3 * channels;
- out[op] = v * 2 + 1;
- op = (op + 1) % nfft;
- }
-}
-
-static void
input_data_int24_max (const guint8 * _in, gfloat * out, guint len,
guint channels, gfloat max_value, guint op, guint nfft)
{
}
static void
-input_data_int16 (const guint8 * _in, gfloat * out, guint len, guint channels,
- gfloat max_value, guint op, guint nfft)
-{
- guint j, ip;
- gint16 *in = (gint16 *) _in;
-
- for (j = 0, ip = 0; j < len; j++, ip += channels) {
- out[op] = in[ip] * 2 + 1;
- op = (op + 1) % nfft;
- }
-}
-
-static void
input_data_int16_max (const guint8 * _in, gfloat * out, guint len,
guint channels, gfloat max_value, guint op, guint nfft)
{
}
static gboolean
-gst_spectrum_setup (GstAudioFilter * base, GstRingBufferSpec * format)
+gst_spectrum_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
GstSpectrum *spectrum = GST_SPECTRUM (base);
- guint width = format->width / 8;
- gboolean is_float = (format->type == GST_BUFTYPE_FLOAT);
- /* max_value will be 0 when depth is 1,
- * interpret -1 and 0 as -1 and +1 if that's the case. */
- guint max_value = (1UL << (format->depth - 1)) - 1;
gboolean multi_channel = spectrum->multi_channel;
GstSpectrumInputData input_data = NULL;
- if (is_float) {
- if (width == 4) {
+ switch (GST_AUDIO_INFO_FORMAT (info)) {
+ case GST_AUDIO_FORMAT_S16:
+ input_data =
+ multi_channel ? input_data_int16_max : input_data_mixed_int16_max;
+ break;
+ case GST_AUDIO_FORMAT_S24:
+ input_data =
+ multi_channel ? input_data_int24_max : input_data_mixed_int24_max;
+ break;
+ case GST_AUDIO_FORMAT_S32:
+ input_data =
+ multi_channel ? input_data_int32_max : input_data_mixed_int32_max;
+ break;
+ case GST_AUDIO_FORMAT_F32:
input_data = multi_channel ? input_data_float : input_data_mixed_float;
- } else if (width == 8) {
+ break;
+ case GST_AUDIO_FORMAT_F64:
input_data = multi_channel ? input_data_double : input_data_mixed_double;
- } else {
- g_assert_not_reached ();
- }
- } else {
- if (width == 4) {
- if (max_value) {
- input_data =
- multi_channel ? input_data_int32_max : input_data_mixed_int32_max;
- } else {
- input_data = multi_channel ? input_data_int32 : input_data_mixed_int32;
- }
- } else if (width == 3) {
- if (max_value) {
- input_data =
- multi_channel ? input_data_int24_max : input_data_mixed_int24_max;
- } else {
- input_data = multi_channel ? input_data_int24 : input_data_mixed_int24;
- }
- } else if (width == 2) {
- if (max_value) {
- input_data =
- multi_channel ? input_data_int16_max : input_data_mixed_int16_max;
- } else {
- input_data = multi_channel ? input_data_int16 : input_data_mixed_int16;
- }
- } else {
+ break;
+ default:
g_assert_not_reached ();
- }
+ break;
}
-
spectrum->input_data = input_data;
+
gst_spectrum_reset_state (spectrum);
+
return TRUE;
}
}
} else {
guint c;
- guint channels = GST_AUDIO_FILTER (spectrum)->format.channels;
+ guint channels = GST_AUDIO_FILTER_CHANNELS (spectrum);
if (spectrum->message_magnitude) {
mcv = gst_spectrum_message_add_container (s, GST_TYPE_ARRAY, "magnitude");
gst_spectrum_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
GstSpectrum *spectrum = GST_SPECTRUM (trans);
- GstRingBufferSpec *format = &GST_AUDIO_FILTER (spectrum)->format;
- guint rate = format->rate;
- guint channels = format->channels;
+ guint rate = GST_AUDIO_FILTER_RATE (spectrum);
+ guint channels = GST_AUDIO_FILTER_CHANNELS (spectrum);
+ guint bps = GST_AUDIO_FILTER_BPS (spectrum);
+ guint bpf = GST_AUDIO_FILTER_BPF (spectrum);
guint output_channels = spectrum->multi_channel ? channels : 1;
guint c;
- guint width = format->width / 8;
- gfloat max_value = (1UL << (format->depth - 1)) - 1;
+ gfloat max_value = (1UL << ((bps << 3) - 1)) - 1;
guint bands = spectrum->bands;
guint nfft = 2 * bands - 2;
guint input_pos;
gfloat *input;
- const guint8 *data = GST_BUFFER_DATA (buffer);
- guint size = GST_BUFFER_SIZE (buffer);
- guint frame_size = width * channels;
+ GstMapInfo map;
+ const guint8 *data;
+ gsize size;
guint fft_todo, msg_todo, block_size;
gboolean have_full_interval;
GstSpectrumChannel *cd;
GstSpectrumInputData input_data;
- GST_LOG_OBJECT (spectrum, "input size: %d bytes", GST_BUFFER_SIZE (buffer));
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ GST_LOG_OBJECT (spectrum, "input size: %" G_GSIZE_FORMAT " bytes", size);
if (GST_BUFFER_IS_DISCONT (buffer)) {
GST_DEBUG_OBJECT (spectrum, "Discontinuity detected -- flushing");
input_pos = spectrum->input_pos;
input_data = spectrum->input_data;
- while (size >= frame_size) {
+ while (size >= bpf) {
/* run input_data for a chunk of data */
fft_todo = nfft - (spectrum->num_frames % nfft);
msg_todo = spectrum->frames_todo - spectrum->num_frames;
GST_LOG_OBJECT (spectrum,
- "message frames todo: %u, fft frames todo: %u, input frames %u",
- msg_todo, fft_todo, (size / frame_size));
+ "message frames todo: %u, fft frames todo: %u, input frames %"
+ G_GSIZE_FORMAT, msg_todo, fft_todo, (size / bpf));
block_size = msg_todo;
- if (block_size > (size / frame_size))
- block_size = (size / frame_size);
+ if (block_size > (size / bpf))
+ block_size = (size / bpf);
if (block_size > fft_todo)
block_size = fft_todo;
cd = &spectrum->channel_data[c];
input = cd->input;
/* Move the current frames into our ringbuffers */
- input_data (data + c * width, input, block_size, channels, max_value,
+ input_data (data + c * bps, input, block_size, channels, max_value,
input_pos, nfft);
}
- data += block_size * frame_size;
- size -= block_size * frame_size;
+ data += block_size * bpf;
+ size -= block_size * bpf;
input_pos = (input_pos + block_size) % nfft;
spectrum->num_frames += block_size;
have_full_interval = (spectrum->num_frames == spectrum->frames_todo);
- GST_LOG_OBJECT (spectrum, "size: %u, do-fft = %d, do-message = %d", size,
+ GST_LOG_OBJECT (spectrum,
+ "size: %" G_GSIZE_FORMAT ", do-fft = %d, do-message = %d", size,
(spectrum->num_frames % nfft == 0), have_full_interval);
/* If we have enough frames for an FFT or we have all frames required for
spectrum->input_pos = input_pos;
+ gst_buffer_unmap (buffer, &map);
+
g_assert (size == 0);
return GST_FLOW_OK;
libgstudp_la_SOURCES = gstudp.c gstudpsrc.c gstudpsink.c gstmultiudpsink.c gstdynudpsink.c gstudpnetutils.c
-# adding -D_GNU_SOURCE to get non-POSIX extensions like EAI_ADDRFAMILY
-# with glibc >= 2.8 when including netdb.h (see glibc sources bug 6452)
-libgstudp_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) -D_GNU_SOURCE
-libgstudp_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstnetbuffer-@GST_MAJORMINOR@ \
- $(GST_BASE_LIBS) $(WIN32_LIBS)
+libgstudp_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_NET_CFLAGS) $(GST_CFLAGS) $(GIO_CFLAGS)
+libgstudp_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_NET_LIBS) $(GIO_LIBS)
libgstudp_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstudp_la_LIBTOOLFLAGS = --tag=disable-static
* Copyright (C) <2005> Philippe Khalaf <burger@speedy.org>
* Copyright (C) <2005> Nokia Corporation <kai.vehmanen@nokia.com>
* Copyright (C) <2006> Joni Valtanen <joni.valtanen@movial.fi>
+ * Copyright (C) <2012> Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
#include "gstudp-marshal.h"
#include "gstdynudpsink.h"
-#include <stdio.h>
-#include <stdlib.h>
-#ifdef HAVE_UNISTD_H
-#include <unistd.h>
-#endif
-#include <errno.h>
-#include <string.h>
-#ifdef HAVE_SYS_TIME_H
-#include <sys/time.h>
-#endif
-#include <sys/types.h>
-#include <gst/netbuffer/gstnetbuffer.h>
+#include <gst/net/gstnetaddressmeta.h>
GST_DEBUG_CATEGORY_STATIC (dynudpsink_debug);
#define GST_CAT_DEFAULT (dynudpsink_debug)
LAST_SIGNAL
};
-#define UDP_DEFAULT_SOCKFD -1
-#define UDP_DEFAULT_CLOSEFD TRUE
+#define UDP_DEFAULT_SOCKET NULL
+#define UDP_DEFAULT_CLOSE_SOCKET TRUE
enum
{
PROP_0,
- PROP_SOCKFD,
- PROP_CLOSEFD
+ PROP_SOCKET,
+ PROP_CLOSE_SOCKET
};
-#define CLOSE_IF_REQUESTED(udpctx) \
-G_STMT_START { \
- if ((!udpctx->externalfd) || (udpctx->externalfd && udpctx->closefd)) { \
- CLOSE_SOCKET(udpctx->sock); \
- if (udpctx->sock == udpctx->sockfd) \
- udpctx->sockfd = UDP_DEFAULT_SOCKFD; \
- } \
- udpctx->sock = -1; \
-} G_STMT_END
-
-static void gst_dynudpsink_base_init (gpointer g_class);
-static void gst_dynudpsink_class_init (GstDynUDPSink * klass);
-static void gst_dynudpsink_init (GstDynUDPSink * udpsink);
static void gst_dynudpsink_finalize (GObject * object);
static GstFlowReturn gst_dynudpsink_render (GstBaseSink * sink,
GstBuffer * buffer);
-static void gst_dynudpsink_close (GstDynUDPSink * sink);
-static GstStateChangeReturn gst_dynudpsink_change_state (GstElement * element,
- GstStateChange transition);
+static gboolean gst_dynudpsink_stop (GstBaseSink * bsink);
+static gboolean gst_dynudpsink_start (GstBaseSink * bsink);
+static gboolean gst_dynudpsink_unlock (GstBaseSink * bsink);
+static gboolean gst_dynudpsink_unlock_stop (GstBaseSink * bsink);
static void gst_dynudpsink_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_dynudpsink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-
-static GstElementClass *parent_class = NULL;
+static GstStructure *gst_dynudpsink_get_stats (GstDynUDPSink * sink,
+ const gchar * host, gint port);
static guint gst_dynudpsink_signals[LAST_SIGNAL] = { 0 };
-GType
-gst_dynudpsink_get_type (void)
-{
- static GType dynudpsink_type = 0;
-
- if (!dynudpsink_type) {
- static const GTypeInfo dynudpsink_info = {
- sizeof (GstDynUDPSinkClass),
- gst_dynudpsink_base_init,
- NULL,
- (GClassInitFunc) gst_dynudpsink_class_init,
- NULL,
- NULL,
- sizeof (GstDynUDPSink),
- 0,
- (GInstanceInitFunc) gst_dynudpsink_init,
- NULL
- };
-
- dynudpsink_type =
- g_type_register_static (GST_TYPE_BASE_SINK, "GstDynUDPSink",
- &dynudpsink_info, 0);
- }
- return dynudpsink_type;
-}
-
-static void
-gst_dynudpsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
-
- gst_element_class_set_details_simple (element_class, "UDP packet sender",
- "Sink/Network",
- "Send data over the network via UDP",
- "Philippe Khalaf <burger@speedy.org>");
-}
+#define gst_dynudpsink_parent_class parent_class
+G_DEFINE_TYPE (GstDynUDPSink, gst_dynudpsink, GST_TYPE_BASE_SINK);
static void
-gst_dynudpsink_class_init (GstDynUDPSink * klass)
+gst_dynudpsink_class_init (GstDynUDPSinkClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class->finalize = gst_dynudpsink_finalize;
gst_dynudpsink_signals[SIGNAL_GET_STATS] =
- g_signal_new ("get-stats", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
+ g_signal_new ("get-stats", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
G_STRUCT_OFFSET (GstDynUDPSinkClass, get_stats),
- NULL, NULL, gst_udp_marshal_BOXED__STRING_INT, G_TYPE_VALUE_ARRAY, 2,
+ NULL, NULL, gst_udp_marshal_BOXED__STRING_INT, GST_TYPE_STRUCTURE, 2,
G_TYPE_STRING, G_TYPE_INT);
- g_object_class_install_property (gobject_class, PROP_SOCKFD,
- g_param_spec_int ("sockfd", "socket handle",
- "Socket to use for UDP sending. (-1 == allocate)",
- -1, G_MAXINT16, UDP_DEFAULT_SOCKFD,
+ g_object_class_install_property (gobject_class, PROP_SOCKET,
+ g_param_spec_object ("socket", "Socket",
+ "Socket to use for UDP sending. (NULL == allocate)",
+ G_TYPE_SOCKET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CLOSE_SOCKET,
+ g_param_spec_boolean ("close-socket", "Close socket",
+ "Close socket if passed as property on state change",
+ UDP_DEFAULT_CLOSE_SOCKET,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_CLOSEFD,
- g_param_spec_boolean ("closefd", "Close sockfd",
- "Close sockfd if passed as property on state change",
- UDP_DEFAULT_CLOSEFD, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gstelement_class->change_state = gst_dynudpsink_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP",
+ "Philippe Khalaf <burger@speedy.org>");
gstbasesink_class->render = gst_dynudpsink_render;
+ gstbasesink_class->start = gst_dynudpsink_start;
+ gstbasesink_class->stop = gst_dynudpsink_stop;
+ gstbasesink_class->unlock = gst_dynudpsink_unlock;
+ gstbasesink_class->unlock_stop = gst_dynudpsink_unlock_stop;
+
+ klass->get_stats = gst_dynudpsink_get_stats;
GST_DEBUG_CATEGORY_INIT (dynudpsink_debug, "dynudpsink", 0, "UDP sink");
}
static void
gst_dynudpsink_init (GstDynUDPSink * sink)
{
- WSA_STARTUP (sink);
+ sink->socket = UDP_DEFAULT_SOCKET;
+ sink->close_socket = UDP_DEFAULT_CLOSE_SOCKET;
+ sink->external_socket = FALSE;
- sink->sockfd = UDP_DEFAULT_SOCKFD;
- sink->closefd = UDP_DEFAULT_CLOSEFD;
- sink->externalfd = FALSE;
-
- sink->sock = -1;
+ sink->used_socket = NULL;
+ sink->cancellable = g_cancellable_new ();
+ sink->family = G_SOCKET_FAMILY_IPV6;
}
static void
gst_dynudpsink_finalize (GObject * object)
{
- GstDynUDPSink *udpsink;
+ GstDynUDPSink *sink;
- udpsink = GST_DYNUDPSINK (object);
+ sink = GST_DYNUDPSINK (object);
- if (udpsink->sockfd >= 0 && udpsink->closefd)
- CLOSE_SOCKET (udpsink->sockfd);
+ if (sink->cancellable)
+ g_object_unref (sink->cancellable);
+ sink->cancellable = NULL;
- G_OBJECT_CLASS (parent_class)->finalize (object);
+ if (sink->socket)
+ g_object_unref (sink->socket);
+ sink->socket = NULL;
- WSA_CLEANUP (object);
+ if (sink->used_socket)
+ g_object_unref (sink->used_socket);
+ sink->used_socket = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
}
static GstFlowReturn
gst_dynudpsink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
GstDynUDPSink *sink;
- gint ret, size;
- guint8 *data;
- GstNetBuffer *netbuf;
- struct sockaddr_in theiraddr;
- guint16 destport;
- guint32 destaddr;
+ gssize ret;
+ GstMapInfo map;
+ GstNetAddressMeta *meta;
+ GSocketAddress *addr;
+ GError *err = NULL;
+ GSocketFamily family;
- memset (&theiraddr, 0, sizeof (theiraddr));
+ meta = gst_buffer_get_net_address_meta (buffer);
- if (GST_IS_NETBUFFER (buffer)) {
- netbuf = GST_NETBUFFER (buffer);
- } else {
+ if (meta == NULL) {
GST_DEBUG ("Received buffer is not a GstNetBuffer, skipping");
return GST_FLOW_OK;
}
sink = GST_DYNUDPSINK (bsink);
- size = GST_BUFFER_SIZE (netbuf);
- data = GST_BUFFER_DATA (netbuf);
+ /* let's get the address from the metadata */
+ addr = meta->addr;
- GST_DEBUG ("about to send %d bytes", size);
+ family = g_socket_address_get_family (addr);
+ if (sink->family != family && family != G_SOCKET_FAMILY_IPV4)
+ goto invalid_family;
- // let's get the address from the netbuffer
- gst_netaddress_get_ip4_address (&netbuf->to, &destaddr, &destport);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
- GST_DEBUG ("sending %d bytes to client %d port %d", size, destaddr, destport);
+ GST_DEBUG ("about to send %" G_GSIZE_FORMAT " bytes", map.size);
- theiraddr.sin_family = AF_INET;
- theiraddr.sin_addr.s_addr = destaddr;
- theiraddr.sin_port = destport;
-#ifdef G_OS_WIN32
- ret = sendto (sink->sock, (char *) data, size, 0,
-#else
- ret = sendto (sink->sock, data, size, 0,
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ gchar *host;
+
+ host =
+ g_inet_address_to_string (g_inet_socket_address_get_address
+ (G_INET_SOCKET_ADDRESS (addr)));
+ GST_DEBUG ("sending %" G_GSIZE_FORMAT " bytes to client %s port %d",
+ map.size, host,
+ g_inet_socket_address_get_port (G_INET_SOCKET_ADDRESS (addr)));
+ g_free (host);
+ }
#endif
- (struct sockaddr *) &theiraddr, sizeof (theiraddr));
- if (ret < 0) {
- if (errno != EINTR && errno != EAGAIN) {
- goto send_error;
- }
- }
+ ret =
+ g_socket_send_to (sink->used_socket, addr, (gchar *) map.data, map.size,
+ sink->cancellable, &err);
+ gst_buffer_unmap (buffer, &map);
+
+ if (ret < 0)
+ goto send_error;
- GST_DEBUG ("sent %d bytes", size);
+ GST_DEBUG ("sent %" G_GSSIZE_FORMAT " bytes", ret);
return GST_FLOW_OK;
send_error:
{
- GST_DEBUG ("got send error %s (%d)", g_strerror (errno), errno);
+ GST_DEBUG ("got send error %s", err->message);
+ g_clear_error (&err);
+ return GST_FLOW_ERROR;
+ }
+invalid_family:
+ {
+ GST_DEBUG ("invalid family (got %d, expected %d)",
+ g_socket_address_get_family (addr), sink->family);
return GST_FLOW_ERROR;
}
}
udpsink = GST_DYNUDPSINK (object);
switch (prop_id) {
- case PROP_SOCKFD:
- if (udpsink->sockfd >= 0 && udpsink->sockfd != udpsink->sock &&
- udpsink->closefd)
- CLOSE_SOCKET (udpsink->sockfd);
- udpsink->sockfd = g_value_get_int (value);
- GST_DEBUG ("setting SOCKFD to %d", udpsink->sockfd);
+ case PROP_SOCKET:
+ if (udpsink->socket != NULL && udpsink->socket != udpsink->used_socket &&
+ udpsink->close_socket) {
+ GError *err = NULL;
+
+ if (!g_socket_close (udpsink->socket, &err)) {
+ GST_ERROR ("failed to close socket %p: %s", udpsink->socket,
+ err->message);
+ g_clear_error (&err);
+ }
+ }
+ if (udpsink->socket)
+ g_object_unref (udpsink->socket);
+ udpsink->socket = g_value_dup_object (value);
+ GST_DEBUG ("setting socket to %p", udpsink->socket);
break;
- case PROP_CLOSEFD:
- udpsink->closefd = g_value_get_boolean (value);
+ case PROP_CLOSE_SOCKET:
+ udpsink->close_socket = g_value_get_boolean (value);
break;
-
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
udpsink = GST_DYNUDPSINK (object);
switch (prop_id) {
- case PROP_SOCKFD:
- g_value_set_int (value, udpsink->sockfd);
+ case PROP_SOCKET:
+ g_value_set_object (value, udpsink->socket);
break;
- case PROP_CLOSEFD:
- g_value_set_boolean (value, udpsink->closefd);
+ case PROP_CLOSE_SOCKET:
+ g_value_set_boolean (value, udpsink->close_socket);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
}
}
-
/* create a socket for sending to remote machine */
static gboolean
-gst_dynudpsink_init_send (GstDynUDPSink * sink)
+gst_dynudpsink_start (GstBaseSink * bsink)
{
- guint bc_val;
-
- if (sink->sockfd == -1) {
- /* create sender socket if none available */
- if ((sink->sock = socket (AF_INET, SOCK_DGRAM, 0)) < 0)
- goto no_socket;
-
- bc_val = 1;
- if (setsockopt (sink->sock, SOL_SOCKET, SO_BROADCAST, &bc_val,
- sizeof (bc_val)) < 0)
- goto no_broadcast;
+ GstDynUDPSink *udpsink;
+ GError *err = NULL;
+
+ udpsink = GST_DYNUDPSINK (bsink);
+
+ if (udpsink->socket == NULL) {
+ /* create sender socket if none available, first try IPv6, then
+ * fall-back to IPv4 */
+ udpsink->family = G_SOCKET_FAMILY_IPV6;
+ if ((udpsink->used_socket =
+ g_socket_new (G_SOCKET_FAMILY_IPV6,
+ G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP, &err)) == NULL) {
+ udpsink->family = G_SOCKET_FAMILY_IPV4;
+ if ((udpsink->used_socket = g_socket_new (G_SOCKET_FAMILY_IPV4,
+ G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP, &err)) == NULL)
+ goto no_socket;
+ }
- sink->externalfd = TRUE;
+ udpsink->external_socket = FALSE;
} else {
- sink->sock = sink->sockfd;
- sink->externalfd = TRUE;
+ udpsink->used_socket = G_SOCKET (g_object_ref (udpsink->socket));
+ udpsink->external_socket = TRUE;
+ udpsink->family = g_socket_get_family (udpsink->used_socket);
}
+
+ g_socket_set_broadcast (udpsink->used_socket, TRUE);
+
return TRUE;
/* ERRORS */
no_socket:
{
- perror ("socket");
- return FALSE;
- }
-no_broadcast:
- {
- perror ("setsockopt");
- CLOSE_IF_REQUESTED (sink);
+ GST_ERROR_OBJECT (udpsink, "Failed to create socket: %s", err->message);
+ g_clear_error (&err);
return FALSE;
}
}
-GValueArray *
+static GstStructure *
gst_dynudpsink_get_stats (GstDynUDPSink * sink, const gchar * host, gint port)
{
return NULL;
}
-static void
-gst_dynudpsink_close (GstDynUDPSink * sink)
+static gboolean
+gst_dynudpsink_stop (GstBaseSink * bsink)
{
- CLOSE_IF_REQUESTED (sink);
+ GstDynUDPSink *udpsink;
+
+ udpsink = GST_DYNUDPSINK (bsink);
+
+ if (udpsink->used_socket) {
+ if (udpsink->close_socket || !udpsink->external_socket) {
+ GError *err = NULL;
+
+ if (!g_socket_close (udpsink->used_socket, &err)) {
+ GST_ERROR_OBJECT (udpsink, "Failed to close socket: %s", err->message);
+ g_clear_error (&err);
+ }
+ }
+
+ g_object_unref (udpsink->used_socket);
+ udpsink->used_socket = NULL;
+ }
+
+ return TRUE;
}
-static GstStateChangeReturn
-gst_dynudpsink_change_state (GstElement * element, GstStateChange transition)
+static gboolean
+gst_dynudpsink_unlock (GstBaseSink * bsink)
{
- GstStateChangeReturn ret;
- GstDynUDPSink *sink;
+ GstDynUDPSink *udpsink;
- sink = GST_DYNUDPSINK (element);
+ udpsink = GST_DYNUDPSINK (bsink);
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- if (!gst_dynudpsink_init_send (sink))
- goto no_init;
- break;
- default:
- break;
- }
+ g_cancellable_cancel (udpsink->cancellable);
- ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ return TRUE;
+}
- switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_dynudpsink_close (sink);
- break;
- default:
- break;
- }
- return ret;
+static gboolean
+gst_dynudpsink_unlock_stop (GstBaseSink * bsink)
+{
+ GstDynUDPSink *udpsink;
- /* ERRORS */
-no_init:
- {
- return GST_STATE_CHANGE_FAILURE;
- }
+ udpsink = GST_DYNUDPSINK (bsink);
+
+ g_cancellable_reset (udpsink->cancellable);
+
+ return TRUE;
}
#include <gst/gst.h>
#include <gst/base/gstbasesink.h>
+#include <gio/gio.h>
G_BEGIN_DECLS
#include "gstudpnetutils.h"
-
#include "gstudp.h"
#define GST_TYPE_DYNUDPSINK (gst_dynudpsink_get_type())
GstBaseSink parent;
/* properties */
- gint sockfd;
- gboolean closefd;
+ GSocket *socket;
+ gboolean close_socket;
/* the socket in use */
- int sock;
- gboolean externalfd;
+ GSocket *used_socket;
+ gboolean external_socket;
+ GCancellable *cancellable;
+ GSocketFamily family;
};
struct _GstDynUDPSinkClass {
GstBaseSinkClass parent_class;
/* element methods */
- GValueArray* (*get_stats) (GstDynUDPSink *sink, const gchar *host, gint port);
+ GstStructure* (*get_stats) (GstDynUDPSink *sink, const gchar *host, gint port);
/* signals */
};
GType gst_dynudpsink_get_type(void);
-GValueArray* gst_dynudpsink_get_stats (GstDynUDPSink *sink, const gchar *host, gint port);
-
G_END_DECLS
#endif /* __GST_DYNUDPSINK_H__ */
/* GStreamer
* Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
* Copyright (C) <2009> Jarkko Palviainen <jarkko.palviainen@sesca.com>
+ * Copyright (C) <2012> Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
#include "gstudp-marshal.h"
#include "gstmultiudpsink.h"
-#include <stdio.h>
-#include <stdlib.h>
-#ifdef HAVE_UNISTD_H
-#include <unistd.h>
-#endif
-#include <errno.h>
#include <string.h>
#include "gst/glib-compat-private.h"
LAST_SIGNAL
};
-#define DEFAULT_SOCKFD -1
-#define DEFAULT_CLOSEFD TRUE
-#define DEFAULT_SOCK -1
+#define DEFAULT_SOCKET NULL
+#define DEFAULT_CLOSE_SOCKET TRUE
+#define DEFAULT_USED_SOCKET NULL
#define DEFAULT_CLIENTS NULL
-#define DEFAULT_FAMILY 0
+#define DEFAULT_FAMILY G_SOCKET_FAMILY_IPV6
/* FIXME, this should be disabled by default, we don't need to join a multicast
* group for sending, if this socket is also used for receiving, it should
* be configured in the element that does the receive. */
PROP_0,
PROP_BYTES_TO_SERVE,
PROP_BYTES_SERVED,
- PROP_SOCKFD,
- PROP_CLOSEFD,
- PROP_SOCK,
+ PROP_SOCKET,
+ PROP_CLOSE_SOCKET,
+ PROP_USED_SOCKET,
PROP_CLIENTS,
PROP_AUTO_MULTICAST,
PROP_TTL,
PROP_LAST
};
-#define CLOSE_IF_REQUESTED(udpctx) \
-G_STMT_START { \
- if ((!udpctx->externalfd) || (udpctx->externalfd && udpctx->closefd)) { \
- CLOSE_SOCKET(udpctx->sock); \
- if (udpctx->sock == udpctx->sockfd) \
- udpctx->sockfd = DEFAULT_SOCKFD; \
- } \
- udpctx->sock = DEFAULT_SOCK; \
-} G_STMT_END
-
-static void gst_multiudpsink_base_init (gpointer g_class);
-static void gst_multiudpsink_class_init (GstMultiUDPSinkClass * klass);
-static void gst_multiudpsink_init (GstMultiUDPSink * udpsink);
static void gst_multiudpsink_finalize (GObject * object);
static GstFlowReturn gst_multiudpsink_render (GstBaseSink * sink,
GstBuffer * buffer);
-#ifndef G_OS_WIN32 /* sendmsg() is not available on Windows */
-static GstFlowReturn gst_multiudpsink_render_list (GstBaseSink * bsink,
- GstBufferList * list);
-#endif
-static GstStateChangeReturn gst_multiudpsink_change_state (GstElement *
- element, GstStateChange transition);
+
+static gboolean gst_multiudpsink_start (GstBaseSink * bsink);
+static gboolean gst_multiudpsink_stop (GstBaseSink * bsink);
+static gboolean gst_multiudpsink_unlock (GstBaseSink * bsink);
+static gboolean gst_multiudpsink_unlock_stop (GstBaseSink * bsink);
static void gst_multiudpsink_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_multiudpsink_clear_internal (GstMultiUDPSink * sink,
gboolean lock);
-static GstElementClass *parent_class = NULL;
-
static guint gst_multiudpsink_signals[LAST_SIGNAL] = { 0 };
-GType
-gst_multiudpsink_get_type (void)
-{
- static GType multiudpsink_type = 0;
-
- if (!multiudpsink_type) {
- static const GTypeInfo multiudpsink_info = {
- sizeof (GstMultiUDPSinkClass),
- gst_multiudpsink_base_init,
- NULL,
- (GClassInitFunc) gst_multiudpsink_class_init,
- NULL,
- NULL,
- sizeof (GstMultiUDPSink),
- 0,
- (GInstanceInitFunc) gst_multiudpsink_init,
- NULL
- };
-
- multiudpsink_type =
- g_type_register_static (GST_TYPE_BASE_SINK, "GstMultiUDPSink",
- &multiudpsink_info, 0);
- }
- return multiudpsink_type;
-}
-
-static void
-gst_multiudpsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &sink_template);
-
- gst_element_class_set_details_simple (element_class, "UDP packet sender",
- "Sink/Network",
- "Send data over the network via UDP",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_multiudpsink_parent_class parent_class
+G_DEFINE_TYPE (GstMultiUDPSink, gst_multiudpsink, GST_TYPE_BASE_SINK);
static void
gst_multiudpsink_class_init (GstMultiUDPSinkClass * klass)
gstelement_class = (GstElementClass *) klass;
gstbasesink_class = (GstBaseSinkClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
-
gobject_class->set_property = gst_multiudpsink_set_property;
gobject_class->get_property = gst_multiudpsink_get_property;
gobject_class->finalize = gst_multiudpsink_finalize;
*
* Get the statistics of the client with destination @host and @port.
*
- * Returns: a GValueArray of uint64: bytes_sent, packets_sent,
+ * Returns: a GstStructure: bytes_sent, packets_sent,
* connect_time (in epoch seconds), disconnect_time (in epoch seconds)
*/
gst_multiudpsink_signals[SIGNAL_GET_STATS] =
g_signal_new ("get-stats", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
G_STRUCT_OFFSET (GstMultiUDPSinkClass, get_stats),
- NULL, NULL, gst_udp_marshal_BOXED__STRING_INT, G_TYPE_VALUE_ARRAY, 2,
+ NULL, NULL, gst_udp_marshal_BOXED__STRING_INT, GST_TYPE_STRUCTURE, 2,
G_TYPE_STRING, G_TYPE_INT);
/**
* GstMultiUDPSink::client-added:
g_param_spec_uint64 ("bytes-served", "Bytes served",
"Total number of bytes sent to all clients", 0, G_MAXUINT64, 0,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_SOCKFD,
- g_param_spec_int ("sockfd", "Socket Handle",
- "Socket to use for UDP sending. (-1 == allocate)",
- -1, G_MAXINT, DEFAULT_SOCKFD,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_CLOSEFD,
- g_param_spec_boolean ("closefd", "Close sockfd",
- "Close sockfd if passed as property on state change",
- DEFAULT_CLOSEFD, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_SOCK,
- g_param_spec_int ("sock", "Socket Handle",
- "Socket currently in use for UDP sending. (-1 == no socket)",
- -1, G_MAXINT, DEFAULT_SOCK,
- G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SOCKET,
+ g_param_spec_object ("socket", "Socket Handle",
+ "Socket to use for UDP sending. (NULL == allocate)",
+ G_TYPE_SOCKET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CLOSE_SOCKET,
+ g_param_spec_boolean ("close-socket", "Close socket",
+ "Close socket if passed as property on state change",
+ DEFAULT_CLOSE_SOCKET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_USED_SOCKET,
+ g_param_spec_object ("used-socket", "Used Socket Handle",
+ "Socket currently in use for UDP sending. (NULL == no socket)",
+ G_TYPE_SOCKET, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_CLIENTS,
g_param_spec_string ("clients", "Clients",
"A comma separated list of host:port pairs with destinations",
"Size of the kernel send buffer in bytes, 0=default", 0, G_MAXINT,
DEFAULT_BUFFER_SIZE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gstelement_class->change_state = gst_multiudpsink_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP",
+ "Wim Taymans <wim.taymans@gmail.com>");
gstbasesink_class->render = gst_multiudpsink_render;
-#ifndef G_OS_WIN32
- gstbasesink_class->render_list = gst_multiudpsink_render_list;
-#endif
+ gstbasesink_class->start = gst_multiudpsink_start;
+ gstbasesink_class->stop = gst_multiudpsink_stop;
+ gstbasesink_class->unlock = gst_multiudpsink_unlock;
+ gstbasesink_class->unlock_stop = gst_multiudpsink_unlock_stop;
klass->add = gst_multiudpsink_add;
klass->remove = gst_multiudpsink_remove;
klass->clear = gst_multiudpsink_clear;
static void
gst_multiudpsink_init (GstMultiUDPSink * sink)
{
- WSA_STARTUP (sink);
-
- sink->client_lock = g_mutex_new ();
- sink->sock = DEFAULT_SOCK;
- sink->sockfd = DEFAULT_SOCKFD;
- sink->closefd = DEFAULT_CLOSEFD;
- sink->externalfd = (sink->sockfd != -1);
+ g_mutex_init (&sink->client_lock);
+ sink->socket = DEFAULT_SOCKET;
+ sink->used_socket = DEFAULT_USED_SOCKET;
+ sink->close_socket = DEFAULT_CLOSE_SOCKET;
+ sink->external_socket = (sink->socket != NULL);
sink->auto_multicast = DEFAULT_AUTO_MULTICAST;
sink->ttl = DEFAULT_TTL;
sink->ttl_mc = DEFAULT_TTL_MC;
sink->loop = DEFAULT_LOOP;
sink->qos_dscp = DEFAULT_QOS_DSCP;
- sink->ss_family = DEFAULT_FAMILY;
+ sink->family = DEFAULT_FAMILY;
sink->send_duplicates = DEFAULT_SEND_DUPLICATES;
+
+ sink->cancellable = g_cancellable_new ();
}
static GstUDPClient *
create_client (GstMultiUDPSink * sink, const gchar * host, gint port)
{
GstUDPClient *client;
+ GInetAddress *addr;
+ GResolver *resolver;
+ GError *err = NULL;
+
+ addr = g_inet_address_new_from_string (host);
+ if (!addr) {
+ GList *results;
+
+ resolver = g_resolver_get_default ();
+ results =
+ g_resolver_lookup_by_name (resolver, host, sink->cancellable, &err);
+ if (!results)
+ goto name_resolve;
+ addr = G_INET_ADDRESS (g_object_ref (results->data));
+
+ g_resolver_free_addresses (results);
+ g_object_unref (resolver);
+ }
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ gchar *ip = g_inet_address_to_string (addr);
+
+ GST_DEBUG_OBJECT (sink, "IP address for host %s is %s", host, ip);
+ g_free (ip);
+ }
+#endif
client = g_slice_new0 (GstUDPClient);
client->refcount = 1;
client->host = g_strdup (host);
client->port = port;
+ client->addr = g_inet_socket_address_new (addr, port);
+ g_object_unref (addr);
return client;
+
+name_resolve:
+ {
+ g_object_unref (resolver);
+
+ return NULL;
+ }
}
static void
free_client (GstUDPClient * client)
{
+ g_object_unref (client->addr);
g_free (client->host);
g_slice_free (GstUDPClient, client);
}
g_list_foreach (sink->clients, (GFunc) free_client, NULL);
g_list_free (sink->clients);
- if (sink->sockfd >= 0 && sink->closefd)
- CLOSE_SOCKET (sink->sockfd);
+ if (sink->socket)
+ g_object_unref (sink->socket);
+ sink->socket = NULL;
- g_mutex_free (sink->client_lock);
+ if (sink->used_socket)
+ g_object_unref (sink->used_socket);
+ sink->used_socket = NULL;
- WSA_CLEANUP (object);
+ if (sink->cancellable)
+ g_object_unref (sink->cancellable);
+ sink->cancellable = NULL;
- G_OBJECT_CLASS (parent_class)->finalize (object);
-}
-
-static gboolean
-socket_error_is_ignorable (void)
-{
-#ifdef G_OS_WIN32
- /* Windows doesn't seem to have an EAGAIN for sockets */
- return WSAGetLastError () == WSAEINTR;
-#else
- return errno == EINTR || errno == EAGAIN;
-#endif
-}
-
-static int
-socket_last_error_code (void)
-{
-#ifdef G_OS_WIN32
- return WSAGetLastError ();
-#else
- return errno;
-#endif
-}
+ g_mutex_clear (&sink->client_lock);
-static gchar *
-socket_last_error_message (void)
-{
-#ifdef G_OS_WIN32
- int errorcode = WSAGetLastError ();
- wchar_t buf[1024];
- DWORD result =
- FormatMessage (FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
- NULL, errorcode, 0, (LPSTR) buf, sizeof (buf) / sizeof (wchar_t), NULL);
- if (FAILED (result)) {
- return g_strdup ("failed to get error message from system");
- } else {
- gchar *res =
- g_convert ((gchar *) buf, -1, "UTF-16", "UTF-8", NULL, NULL, NULL);
- /* g_convert() internally calls windows functions which reset the
- windows error code, so fix it up again like this */
- WSASetLastError (errorcode);
- return res;
- }
-#else
- return g_strdup (g_strerror (errno));
-#endif
+ G_OBJECT_CLASS (parent_class)->finalize (object);
}
static GstFlowReturn
gst_multiudpsink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
GstMultiUDPSink *sink;
- gint ret, size, num = 0, no_clients = 0;
- guint8 *data;
GList *clients;
- gint len;
+ GOutputVector *vec;
+ GstMapInfo *map;
+ guint n_mem, i;
+ gsize size;
+ GstMemory *mem;
+ gint num, no_clients;
+ GError *err = NULL;
sink = GST_MULTIUDPSINK (bsink);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ n_mem = gst_buffer_n_memory (buffer);
+ if (n_mem == 0)
+ goto no_data;
- if (size > UDP_MAX_SIZE) {
- GST_WARNING ("Attempting to send a UDP packet larger than maximum "
- "size (%d > %d)", size, UDP_MAX_SIZE);
+ vec = g_new (GOutputVector, n_mem);
+ map = g_new (GstMapInfo, n_mem);
+
+ size = 0;
+ for (i = 0; i < n_mem; i++) {
+ mem = gst_buffer_get_memory (buffer, i);
+ gst_memory_map (mem, &map[i], GST_MAP_READ);
+
+ if (map[i].size > UDP_MAX_SIZE) {
+ GST_WARNING ("Attempting to send a UDP packet larger than maximum "
+ "size (%" G_GSIZE_FORMAT " > %d)", map[i].size, UDP_MAX_SIZE);
+ }
+
+ vec[i].buffer = map[i].data;
+ vec[i].size = map[i].size;
+
+ size += map[i].size;
}
sink->bytes_to_serve += size;
/* grab lock while iterating and sending to clients, this should be
* fast as UDP never blocks */
- g_mutex_lock (sink->client_lock);
+ g_mutex_lock (&sink->client_lock);
GST_LOG_OBJECT (bsink, "about to send %d bytes", size);
+ no_clients = 0;
+ num = 0;
for (clients = sink->clients; clients; clients = g_list_next (clients)) {
GstUDPClient *client;
gint count;
count = sink->send_duplicates ? client->refcount : 1;
while (count--) {
- while (TRUE) {
- len = gst_udp_get_sockaddr_length (&client->theiraddr);
-
- ret = sendto (*client->sock,
-#ifdef G_OS_WIN32
- (char *) data,
-#else
- data,
-#endif
- size, 0, (struct sockaddr *) &client->theiraddr, len);
-
- if (ret < 0) {
- /* some error, just warn, it's likely recoverable and we don't want to
- * break streaming. We break so that we stop retrying for this client. */
- if (!socket_error_is_ignorable ()) {
- gchar *errormessage = socket_last_error_message ();
- GST_WARNING_OBJECT (sink, "client %p gave error %d (%s)", client,
- socket_last_error_code (), errormessage);
- g_free (errormessage);
- break;
- }
- } else {
- num++;
- client->bytes_sent += ret;
- client->packets_sent++;
- sink->bytes_served += ret;
- break;
- }
- }
- }
- }
- g_mutex_unlock (sink->client_lock);
+ gssize ret;
- GST_LOG_OBJECT (sink, "sent %d bytes to %d (of %d) clients", size, num,
- no_clients);
+ ret =
+ g_socket_send_message (sink->used_socket, client->addr, vec, n_mem,
+ NULL, 0, 0, sink->cancellable, &err);
- return GST_FLOW_OK;
-}
+ if (ret < 0)
+ goto send_error;
-#ifndef G_OS_WIN32
-static GstFlowReturn
-gst_multiudpsink_render_list (GstBaseSink * bsink, GstBufferList * list)
-{
- GstMultiUDPSink *sink;
- GList *clients;
- gint ret, size = 0, num = 0, no_clients = 0;
- struct iovec *iov;
- struct msghdr msg = { 0 };
-
- GstBufferListIterator *it;
- guint gsize;
- GstBuffer *buf;
-
- sink = GST_MULTIUDPSINK (bsink);
-
- g_return_val_if_fail (list != NULL, GST_FLOW_ERROR);
-
- it = gst_buffer_list_iterate (list);
- g_return_val_if_fail (it != NULL, GST_FLOW_ERROR);
-
- while (gst_buffer_list_iterator_next_group (it)) {
- msg.msg_iovlen = 0;
- size = 0;
-
- if ((gsize = gst_buffer_list_iterator_n_buffers (it)) == 0) {
- goto invalid_list;
- }
-
- iov = (struct iovec *) g_malloc (gsize * sizeof (struct iovec));
- msg.msg_iov = iov;
-
- while ((buf = gst_buffer_list_iterator_next (it))) {
- if (GST_BUFFER_SIZE (buf) > UDP_MAX_SIZE) {
- GST_WARNING ("Attempting to send a UDP packet larger than maximum "
- "size (%d > %d)", GST_BUFFER_SIZE (buf), UDP_MAX_SIZE);
- }
-
- msg.msg_iov[msg.msg_iovlen].iov_len = GST_BUFFER_SIZE (buf);
- msg.msg_iov[msg.msg_iovlen].iov_base = GST_BUFFER_DATA (buf);
- msg.msg_iovlen++;
- size += GST_BUFFER_SIZE (buf);
- }
-
- sink->bytes_to_serve += size;
-
- /* grab lock while iterating and sending to clients, this should be
- * fast as UDP never blocks */
- g_mutex_lock (sink->client_lock);
- GST_LOG_OBJECT (bsink, "about to send %d bytes", size);
-
- for (clients = sink->clients; clients; clients = g_list_next (clients)) {
- GstUDPClient *client;
- gint count;
-
- client = (GstUDPClient *) clients->data;
- no_clients++;
- GST_LOG_OBJECT (sink, "sending %d bytes to client %p", size, client);
-
- count = sink->send_duplicates ? client->refcount : 1;
-
- while (count--) {
- while (TRUE) {
- msg.msg_name = (void *) &client->theiraddr;
- msg.msg_namelen = sizeof (client->theiraddr);
- ret = sendmsg (*client->sock, &msg, 0);
-
- if (ret < 0) {
- if (!socket_error_is_ignorable ()) {
- break;
- }
- } else {
- num++;
- client->bytes_sent += ret;
- client->packets_sent++;
- sink->bytes_served += ret;
- break;
- }
- }
- }
+ num++;
+ client->bytes_sent += ret;
+ client->packets_sent++;
+ sink->bytes_served += ret;
}
- g_mutex_unlock (sink->client_lock);
-
- g_free (iov);
- msg.msg_iov = NULL;
+ }
+ g_mutex_unlock (&sink->client_lock);
- GST_LOG_OBJECT (sink, "sent %d bytes to %d (of %d) clients", size, num,
- no_clients);
+ /* unmap all memory again */
+ for (i = 0; i < n_mem; i++) {
+ gst_memory_unmap (map[i].memory, &map[i]);
+ gst_memory_unref (map[i].memory);
}
- gst_buffer_list_iterator_free (it);
+ g_free (vec);
+ g_free (map);
+
+ GST_LOG_OBJECT (sink, "sent %d bytes to %d (of %d) clients", size, num,
+ no_clients);
return GST_FLOW_OK;
-invalid_list:
- gst_buffer_list_iterator_free (it);
- return GST_FLOW_ERROR;
+no_data:
+ {
+ return GST_FLOW_OK;
+ }
+send_error:
+ {
+ GST_DEBUG ("got send error %s", err->message);
+ g_clear_error (&err);
+ return GST_FLOW_ERROR;
+ }
}
-#endif
static void
gst_multiudpsink_set_clients_string (GstMultiUDPSink * sink,
clients = g_strsplit (string, ",", 0);
- g_mutex_lock (sink->client_lock);
+ g_mutex_lock (&sink->client_lock);
/* clear all existing clients */
gst_multiudpsink_clear_internal (sink, FALSE);
for (i = 0; clients[i]; i++) {
gchar *host, *p;
- gint port = 0;
+ gint64 port = 0;
host = clients[i];
p = strstr (clients[i], ":");
if (p != NULL) {
*p = '\0';
- port = atoi (p + 1);
+ port = g_ascii_strtoll (p + 1, NULL, 10);
}
if (port != 0)
gst_multiudpsink_add_internal (sink, host, port, FALSE);
}
- g_mutex_unlock (sink->client_lock);
+ g_mutex_unlock (&sink->client_lock);
g_strfreev (clients);
}
str = g_string_new ("");
- g_mutex_lock (sink->client_lock);
+ g_mutex_lock (&sink->client_lock);
clients = sink->clients;
while (clients) {
GstUDPClient *client;
(clients || count > 1 ? "," : ""));
}
}
- g_mutex_unlock (sink->client_lock);
+ g_mutex_unlock (&sink->client_lock);
return g_string_free (str, FALSE);
}
static void
gst_multiudpsink_setup_qos_dscp (GstMultiUDPSink * sink)
{
- gint tos;
-
/* don't touch on -1 */
if (sink->qos_dscp < 0)
return;
- if (sink->sock < 0)
+ if (sink->used_socket == NULL)
return;
- GST_DEBUG_OBJECT (sink, "setting TOS to %d", sink->qos_dscp);
+#ifdef IP_TOS
+ {
+ gint tos;
+ gint fd;
- /* Extract and shift 6 bits of DSFIELD */
- tos = (sink->qos_dscp & 0x3f) << 2;
+ fd = g_socket_get_fd (sink->used_socket);
- if (setsockopt (sink->sock, IPPROTO_IP, IP_TOS, &tos, sizeof (tos)) < 0) {
- gchar *errormessage = socket_last_error_message ();
- GST_ERROR_OBJECT (sink, "could not set TOS: %s", errormessage);
- g_free (errormessage);
- }
+ GST_DEBUG_OBJECT (sink, "setting TOS to %d", sink->qos_dscp);
+
+ /* Extract and shift 6 bits of DSFIELD */
+ tos = (sink->qos_dscp & 0x3f) << 2;
+
+ if (setsockopt (fd, IPPROTO_IP, IP_TOS, &tos, sizeof (tos)) < 0) {
+ GST_ERROR_OBJECT (sink, "could not set TOS: %s", g_strerror (errno));
+ }
#ifdef IPV6_TCLASS
- if (setsockopt (sink->sock, IPPROTO_IPV6, IPV6_TCLASS, &tos,
- sizeof (tos)) < 0) {
- gchar *errormessage = socket_last_error_message ();
- GST_ERROR_OBJECT (sink, "could not set TCLASS: %s", errormessage);
- g_free (errormessage);
+ if (setsockopt (fd, IPPROTO_IPV6, IPV6_TCLASS, &tos, sizeof (tos)) < 0) {
+ GST_ERROR_OBJECT (sink, "could not set TCLASS: %s", g_strerror (errno));
+ }
}
#endif
+#endif
}
static void
udpsink = GST_MULTIUDPSINK (object);
switch (prop_id) {
- case PROP_SOCKFD:
- if (udpsink->sockfd >= 0 && udpsink->sockfd != udpsink->sock &&
- udpsink->closefd)
- CLOSE_SOCKET (udpsink->sockfd);
- udpsink->sockfd = g_value_get_int (value);
- GST_DEBUG_OBJECT (udpsink, "setting SOCKFD to %d", udpsink->sockfd);
+ case PROP_SOCKET:
+ if (udpsink->socket != NULL && udpsink->socket != udpsink->used_socket &&
+ udpsink->close_socket) {
+ GError *err = NULL;
+
+ if (!g_socket_close (udpsink->socket, &err)) {
+ GST_ERROR ("failed to close socket %p: %s", udpsink->socket,
+ err->message);
+ g_clear_error (&err);
+ }
+ }
+ if (udpsink->socket)
+ g_object_unref (udpsink->socket);
+ udpsink->socket = g_value_dup_object (value);
+ GST_DEBUG_OBJECT (udpsink, "setting socket to %p", udpsink->socket);
break;
- case PROP_CLOSEFD:
- udpsink->closefd = g_value_get_boolean (value);
+ case PROP_CLOSE_SOCKET:
+ udpsink->close_socket = g_value_get_boolean (value);
break;
case PROP_CLIENTS:
gst_multiudpsink_set_clients_string (udpsink, g_value_get_string (value));
case PROP_BYTES_SERVED:
g_value_set_uint64 (value, udpsink->bytes_served);
break;
- case PROP_SOCKFD:
- g_value_set_int (value, udpsink->sockfd);
+ case PROP_SOCKET:
+ g_value_set_object (value, udpsink->socket);
break;
- case PROP_CLOSEFD:
- g_value_set_boolean (value, udpsink->closefd);
+ case PROP_CLOSE_SOCKET:
+ g_value_set_boolean (value, udpsink->close_socket);
break;
- case PROP_SOCK:
- g_value_set_int (value, udpsink->sock);
+ case PROP_USED_SOCKET:
+ g_value_set_object (value, udpsink->used_socket);
break;
case PROP_CLIENTS:
g_value_take_string (value,
gst_multiudpsink_configure_client (GstMultiUDPSink * sink,
GstUDPClient * client)
{
+ GInetSocketAddress *saddr = G_INET_SOCKET_ADDRESS (client->addr);
+ GInetAddress *addr = g_inet_socket_address_get_address (saddr);
+ GError *err = NULL;
+
GST_DEBUG_OBJECT (sink, "configuring client %p", client);
- if (gst_udp_is_multicast (&client->theiraddr)) {
+ if (g_inet_address_get_is_multicast (addr)) {
GST_DEBUG_OBJECT (sink, "we have a multicast client %p", client);
if (sink->auto_multicast) {
GST_DEBUG_OBJECT (sink, "autojoining group");
- if (gst_udp_join_group (*(client->sock), &client->theiraddr, NULL)
- != 0)
+ if (!g_socket_join_multicast_group (sink->used_socket, addr, FALSE, NULL,
+ &err))
goto join_group_failed;
}
GST_DEBUG_OBJECT (sink, "setting loop to %d", sink->loop);
- if (gst_udp_set_loop (sink->sock, sink->ss_family, sink->loop) != 0)
- goto loop_failed;
+ g_socket_set_multicast_loopback (sink->used_socket, sink->loop);
GST_DEBUG_OBJECT (sink, "setting ttl to %d", sink->ttl_mc);
- if (gst_udp_set_ttl (sink->sock, sink->ss_family, sink->ttl_mc, TRUE) != 0)
- goto ttl_failed;
+ g_socket_set_multicast_ttl (sink->used_socket, sink->ttl_mc);
} else {
GST_DEBUG_OBJECT (sink, "setting unicast ttl to %d", sink->ttl);
- if (gst_udp_set_ttl (sink->sock, sink->ss_family, sink->ttl, FALSE) != 0)
- goto ttl_failed;
+ g_socket_set_ttl (sink->used_socket, sink->ttl);
}
return TRUE;
/* ERRORS */
join_group_failed:
{
- gchar *errormessage = socket_last_error_message ();
- int errorcode = socket_last_error_code ();
- CLOSE_IF_REQUESTED (sink);
+ gst_multiudpsink_stop (GST_BASE_SINK (sink));
GST_ELEMENT_ERROR (sink, RESOURCE, SETTINGS, (NULL),
- ("Could not join multicast group (%d): %s", errorcode, errormessage));
- g_free (errormessage);
- return FALSE;
- }
-ttl_failed:
- {
- gchar *errormessage = socket_last_error_message ();
- int errorcode = socket_last_error_code ();
- CLOSE_IF_REQUESTED (sink);
- GST_ELEMENT_ERROR (sink, RESOURCE, SETTINGS, (NULL),
- ("Could not set TTL socket option (%d): %s", errorcode, errormessage));
- g_free (errormessage);
- return FALSE;
- }
-loop_failed:
- {
- gchar *errormessage = socket_last_error_message ();
- int errorcode = socket_last_error_code ();
- CLOSE_IF_REQUESTED (sink);
- GST_ELEMENT_ERROR (sink, RESOURCE, SETTINGS, (NULL),
- ("Could not set loopback socket option (%d): %s",
- errorcode, errormessage));
- g_free (errormessage);
+ ("Could not join multicast group: %s", err->message));
+ g_clear_error (&err);
return FALSE;
}
}
/* create a socket for sending to remote machine */
static gboolean
-gst_multiudpsink_init_send (GstMultiUDPSink * sink)
+gst_multiudpsink_start (GstBaseSink * bsink)
{
- guint bc_val;
+ GstMultiUDPSink *sink;
GList *clients;
GstUDPClient *client;
- int sndsize, ret;
- socklen_t len;
+ GError *err = NULL;
+
+ sink = GST_MULTIUDPSINK (bsink);
- if (sink->sockfd == -1) {
+ if (sink->socket == NULL) {
GST_DEBUG_OBJECT (sink, "creating sockets");
/* create sender socket try IP6, fall back to IP4 */
- sink->ss_family = AF_INET6;
- if ((sink->sock = socket (AF_INET6, SOCK_DGRAM, 0)) == -1) {
- sink->ss_family = AF_INET;
- if ((sink->sock = socket (AF_INET, SOCK_DGRAM, 0)) == -1)
+ sink->family = G_SOCKET_FAMILY_IPV6;
+ if ((sink->used_socket =
+ g_socket_new (G_SOCKET_FAMILY_IPV6,
+ G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP, &err)) == NULL) {
+ sink->family = G_SOCKET_FAMILY_IPV4;
+ if ((sink->used_socket = g_socket_new (G_SOCKET_FAMILY_IPV4,
+ G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP, &err)) == NULL)
goto no_socket;
}
GST_DEBUG_OBJECT (sink, "have socket");
- sink->externalfd = FALSE;
+ sink->external_socket = FALSE;
} else {
- struct sockaddr_storage myaddr;
-
GST_DEBUG_OBJECT (sink, "using configured socket");
- /* we use the configured socket, try to get some info about it */
- len = sizeof (myaddr);
- if (getsockname (sink->sockfd, (struct sockaddr *) &myaddr, &len) < 0)
- goto getsockname_error;
-
- sink->ss_family = myaddr.ss_family;
/* we use the configured socket */
- sink->sock = sink->sockfd;
- sink->externalfd = TRUE;
+ sink->used_socket = G_SOCKET (g_object_ref (sink->socket));
+ sink->family = g_socket_get_family (sink->used_socket);
+ sink->external_socket = TRUE;
}
- len = sizeof (sndsize);
- if (sink->buffer_size != 0) {
- sndsize = sink->buffer_size;
+#ifdef SO_SNDBUF
- GST_DEBUG_OBJECT (sink, "setting udp buffer of %d bytes", sndsize);
- /* set buffer size, Note that on Linux this is typically limited to a
- * maximum of around 100K. Also a minimum of 128 bytes is required on
- * Linux. */
- ret =
- setsockopt (sink->sockfd, SOL_SOCKET, SO_SNDBUF, (void *) &sndsize,
- len);
- if (ret != 0) {
- GST_ELEMENT_WARNING (sink, RESOURCE, SETTINGS, (NULL),
- ("Could not create a buffer of requested %d bytes, %d: %s (%d)",
- sndsize, ret, g_strerror (errno), errno));
+ {
+ socklen_t len;
+ gint sndsize, ret;
+
+ len = sizeof (sndsize);
+ if (sink->buffer_size != 0) {
+ sndsize = sink->buffer_size;
+
+ GST_DEBUG_OBJECT (sink, "setting udp buffer of %d bytes", sndsize);
+ /* set buffer size, Note that on Linux this is typically limited to a
+ * maximum of around 100K. Also a minimum of 128 bytes is required on
+ * Linux. */
+ ret =
+ setsockopt (g_socket_get_fd (sink->used_socket), SOL_SOCKET,
+ SO_SNDBUF, (void *) &sndsize, len);
+ if (ret != 0) {
+ GST_ELEMENT_WARNING (sink, RESOURCE, SETTINGS, (NULL),
+ ("Could not create a buffer of requested %d bytes, %d: %s",
+ sndsize, ret, g_strerror (errno)));
+ }
}
- }
-
- /* read the value of the receive buffer. Note that on linux this returns 2x the
- * value we set because the kernel allocates extra memory for metadata.
- * The default on Linux is about 100K (which is about 50K without metadata) */
- ret =
- getsockopt (sink->sockfd, SOL_SOCKET, SO_SNDBUF, (void *) &sndsize, &len);
- if (ret == 0)
- GST_DEBUG_OBJECT (sink, "have udp buffer of %d bytes", sndsize);
- else
- GST_DEBUG_OBJECT (sink, "could not get udp buffer size");
+ /* read the value of the receive buffer. Note that on linux this returns 2x the
+ * value we set because the kernel allocates extra memory for metadata.
+ * The default on Linux is about 100K (which is about 50K without metadata) */
+ ret =
+ getsockopt (g_socket_get_fd (sink->used_socket), SOL_SOCKET, SO_SNDBUF,
+ (void *) &sndsize, &len);
+ if (ret == 0)
+ GST_DEBUG_OBJECT (sink, "have udp buffer of %d bytes", sndsize);
+ else
+ GST_DEBUG_OBJECT (sink, "could not get udp buffer size");
+ }
+#endif
- bc_val = 1;
- if (setsockopt (sink->sock, SOL_SOCKET, SO_BROADCAST, &bc_val,
- sizeof (bc_val)) < 0)
- goto no_broadcast;
+ g_socket_set_broadcast (sink->used_socket, TRUE);
sink->bytes_to_serve = 0;
sink->bytes_served = 0;
/* ERRORS */
no_socket:
{
- gchar *errormessage = socket_last_error_message ();
- int errorcode = socket_last_error_code ();
GST_ELEMENT_ERROR (sink, RESOURCE, FAILED, (NULL),
- ("Could not create socket (%d): %s", errorcode, errormessage));
- g_free (errormessage);
- return FALSE;
- }
-getsockname_error:
- {
- gchar *errormessage = socket_last_error_message ();
- int errorcode = socket_last_error_code ();
- GST_ELEMENT_ERROR (sink, RESOURCE, FAILED, (NULL),
- ("Could not getsockname (%d): %s", errorcode, errormessage));
- g_free (errormessage);
- return FALSE;
- }
-no_broadcast:
- {
- gchar *errormessage = socket_last_error_message ();
- int errorcode = socket_last_error_code ();
- CLOSE_IF_REQUESTED (sink);
- GST_ELEMENT_ERROR (sink, RESOURCE, SETTINGS, (NULL),
- ("Could not set broadcast socket option (%d): %s",
- errorcode, errormessage));
- g_free (errormessage);
+ ("Could not create socket: %s", err->message));
+ g_clear_error (&err);
return FALSE;
}
}
-static void
-gst_multiudpsink_close (GstMultiUDPSink * sink)
+static gboolean
+gst_multiudpsink_stop (GstBaseSink * bsink)
{
- CLOSE_IF_REQUESTED (sink);
+ GstMultiUDPSink *udpsink;
+
+ udpsink = GST_MULTIUDPSINK (bsink);
+
+ if (udpsink->used_socket) {
+ if (udpsink->close_socket || !udpsink->external_socket) {
+ GError *err = NULL;
+
+ if (!g_socket_close (udpsink->used_socket, &err)) {
+ GST_ERROR_OBJECT (udpsink, "Failed to close socket: %s", err->message);
+ g_clear_error (&err);
+ }
+ }
+
+ g_object_unref (udpsink->used_socket);
+ udpsink->used_socket = NULL;
+ }
+
+ return TRUE;
}
static void
GST_DEBUG_OBJECT (sink, "adding client on host %s, port %d", host, port);
if (lock)
- g_mutex_lock (sink->client_lock);
+ g_mutex_lock (&sink->client_lock);
find = g_list_find_custom (sink->clients, &udpclient,
(GCompareFunc) client_compare);
client->refcount++;
} else {
client = create_client (sink, host, port);
-
- client->sock = &sink->sock;
-
- if (gst_udp_get_addr (host, port, &client->theiraddr) < 0)
- goto getaddrinfo_error;
+ if (!client)
+ goto error;
g_get_current_time (&now);
client->connect_time = GST_TIMEVAL_TO_TIME (now);
- if (*client->sock > 0) {
+ if (sink->used_socket)
gst_multiudpsink_configure_client (sink, client);
- }
GST_DEBUG_OBJECT (sink, "add client with host %s, port %d", host, port);
sink->clients = g_list_prepend (sink->clients, client);
}
if (lock)
- g_mutex_unlock (sink->client_lock);
+ g_mutex_unlock (&sink->client_lock);
g_signal_emit (G_OBJECT (sink),
gst_multiudpsink_signals[SIGNAL_CLIENT_ADDED], 0, host, port);
return;
/* ERRORS */
-getaddrinfo_error:
+error:
{
GST_DEBUG_OBJECT (sink, "did not add client on host %s, port %d", host,
port);
- GST_WARNING_OBJECT (sink, "getaddrinfo lookup error?");
- free_client (client);
if (lock)
- g_mutex_unlock (sink->client_lock);
+ g_mutex_unlock (&sink->client_lock);
return;
}
}
udpclient.host = (gchar *) host;
udpclient.port = port;
- g_mutex_lock (sink->client_lock);
+ g_mutex_lock (&sink->client_lock);
find = g_list_find_custom (sink->clients, &udpclient,
(GCompareFunc) client_compare);
if (!find)
client->refcount--;
if (client->refcount == 0) {
+ GInetSocketAddress *saddr = G_INET_SOCKET_ADDRESS (client->addr);
+ GInetAddress *addr = g_inet_socket_address_get_address (saddr);
+
GST_DEBUG_OBJECT (sink, "remove client with host %s, port %d", host, port);
g_get_current_time (&now);
client->disconnect_time = GST_TIMEVAL_TO_TIME (now);
- if (*(client->sock) != -1 && sink->auto_multicast
- && gst_udp_is_multicast (&client->theiraddr))
- gst_udp_leave_group (*(client->sock), &client->theiraddr);
+ if (sink->used_socket && sink->auto_multicast
+ && g_inet_address_get_is_multicast (addr)) {
+ GError *err = NULL;
+
+ if (!g_socket_leave_multicast_group (sink->used_socket, addr, FALSE, NULL,
+ &err)) {
+ GST_DEBUG_OBJECT (sink, "Failed to leave multicast group: %s",
+ err->message);
+ g_clear_error (&err);
+ }
+ }
/* Unlock to emit signal before we delete the actual client */
- g_mutex_unlock (sink->client_lock);
+ g_mutex_unlock (&sink->client_lock);
g_signal_emit (G_OBJECT (sink),
gst_multiudpsink_signals[SIGNAL_CLIENT_REMOVED], 0, host, port);
- g_mutex_lock (sink->client_lock);
+ g_mutex_lock (&sink->client_lock);
sink->clients = g_list_delete_link (sink->clients, find);
free_client (client);
}
- g_mutex_unlock (sink->client_lock);
+ g_mutex_unlock (&sink->client_lock);
return;
/* ERRORS */
not_found:
{
- g_mutex_unlock (sink->client_lock);
+ g_mutex_unlock (&sink->client_lock);
GST_WARNING_OBJECT (sink, "client at host %s, port %d not found",
host, port);
return;
/* we only need to remove the client structure, there is no additional
* socket or anything to free for UDP */
if (lock)
- g_mutex_lock (sink->client_lock);
+ g_mutex_lock (&sink->client_lock);
g_list_foreach (sink->clients, (GFunc) free_client, sink);
g_list_free (sink->clients);
sink->clients = NULL;
if (lock)
- g_mutex_unlock (sink->client_lock);
+ g_mutex_unlock (&sink->client_lock);
}
void
gst_multiudpsink_clear_internal (sink, TRUE);
}
-GValueArray *
+GstStructure *
gst_multiudpsink_get_stats (GstMultiUDPSink * sink, const gchar * host,
gint port)
{
GstUDPClient *client;
- GValueArray *result = NULL;
+ GstStructure *result = NULL;
GstUDPClient udpclient;
GList *find;
- GValue value = { 0 };
udpclient.host = (gchar *) host;
udpclient.port = port;
- g_mutex_lock (sink->client_lock);
+ g_mutex_lock (&sink->client_lock);
find = g_list_find_custom (sink->clients, &udpclient,
(GCompareFunc) client_compare);
client = (GstUDPClient *) find->data;
- /* Result is a value array of (bytes_sent, packets_sent,
- * connect_time, disconnect_time), all as uint64 */
- result = g_value_array_new (4);
-
- g_value_init (&value, G_TYPE_UINT64);
- g_value_set_uint64 (&value, client->bytes_sent);
- result = g_value_array_append (result, &value);
- g_value_unset (&value);
-
- g_value_init (&value, G_TYPE_UINT64);
- g_value_set_uint64 (&value, client->packets_sent);
- result = g_value_array_append (result, &value);
- g_value_unset (&value);
-
- g_value_init (&value, G_TYPE_UINT64);
- g_value_set_uint64 (&value, client->connect_time);
- result = g_value_array_append (result, &value);
- g_value_unset (&value);
+ result = gst_structure_new_empty ("multiudpsink-stats");
- g_value_init (&value, G_TYPE_UINT64);
- g_value_set_uint64 (&value, client->disconnect_time);
- result = g_value_array_append (result, &value);
- g_value_unset (&value);
+ gst_structure_set (result,
+ "bytes-sent", G_TYPE_UINT64, client->bytes_sent,
+ "packets-sent", G_TYPE_UINT64, client->packets_sent,
+ "connect-time", G_TYPE_UINT64, client->connect_time,
+ "disconnect-time", G_TYPE_UINT64, client->disconnect_time, NULL);
- g_mutex_unlock (sink->client_lock);
+ g_mutex_unlock (&sink->client_lock);
return result;
/* ERRORS */
not_found:
{
- g_mutex_unlock (sink->client_lock);
+ g_mutex_unlock (&sink->client_lock);
GST_WARNING_OBJECT (sink, "client with host %s, port %d not found",
host, port);
/* Apparently (see comment in gstmultifdsink.c) returning NULL from here may
* confuse/break python bindings */
- return g_value_array_new (0);
+ return gst_structure_new_empty ("multiudpsink-stats");
}
}
-static GstStateChangeReturn
-gst_multiudpsink_change_state (GstElement * element, GstStateChange transition)
+static gboolean
+gst_multiudpsink_unlock (GstBaseSink * bsink)
{
- GstStateChangeReturn ret;
GstMultiUDPSink *sink;
- sink = GST_MULTIUDPSINK (element);
+ sink = GST_MULTIUDPSINK (bsink);
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- if (!gst_multiudpsink_init_send (sink))
- goto no_init;
- break;
- default:
- break;
- }
+ g_cancellable_cancel (sink->cancellable);
- ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ return TRUE;
+}
- switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_multiudpsink_close (sink);
- break;
- default:
- break;
- }
- return ret;
+static gboolean
+gst_multiudpsink_unlock_stop (GstBaseSink * bsink)
+{
+ GstMultiUDPSink *sink;
- /* ERRORS */
-no_init:
- {
- /* _init_send() posted specific error already */
- return GST_STATE_CHANGE_FAILURE;
- }
+ sink = GST_MULTIUDPSINK (bsink);
+
+ g_cancellable_reset (sink->cancellable);
+
+ return TRUE;
}
/* GStreamer
- * Copyright (C) <2005> Wim Taymand <wim@fluendo.com>
+ * Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
#include <gst/gst.h>
#include <gst/base/gstbasesink.h>
+#include <gio/gio.h>
G_BEGIN_DECLS
typedef struct {
gint refcount;
- int *sock;
-
- struct sockaddr_storage theiraddr;
-
+ GSocketAddress *addr;
gchar *host;
gint port;
struct _GstMultiUDPSink {
GstBaseSink parent;
- int sock;
+ GSocket *used_socket;
+ GCancellable *cancellable;
- GMutex *client_lock;
+ GMutex client_lock;
GList *clients;
/* properties */
guint64 bytes_to_serve;
guint64 bytes_served;
- int sockfd;
- gboolean closefd;
+ GSocket *socket;
+ gboolean close_socket;
- gboolean externalfd;
+ gboolean external_socket;
gboolean auto_multicast;
gint ttl;
gint ttl_mc;
gboolean loop;
gint qos_dscp;
- guint16 ss_family;
+ GSocketFamily family;
gboolean send_duplicates;
gint buffer_size;
void (*add) (GstMultiUDPSink *sink, const gchar *host, gint port);
void (*remove) (GstMultiUDPSink *sink, const gchar *host, gint port);
void (*clear) (GstMultiUDPSink *sink);
- GValueArray* (*get_stats) (GstMultiUDPSink *sink, const gchar *host, gint port);
+ GstStructure* (*get_stats) (GstMultiUDPSink *sink, const gchar *host, gint port);
/* signals */
void (*client_added) (GstElement *element, const gchar *host, gint port);
void gst_multiudpsink_add (GstMultiUDPSink *sink, const gchar *host, gint port);
void gst_multiudpsink_remove (GstMultiUDPSink *sink, const gchar *host, gint port);
void gst_multiudpsink_clear (GstMultiUDPSink *sink);
-GValueArray* gst_multiudpsink_get_stats (GstMultiUDPSink *sink, const gchar *host, gint port);
+GstStructure* gst_multiudpsink_get_stats (GstMultiUDPSink *sink, const gchar *host, gint port);
G_END_DECLS
#include "config.h"
#endif
-#include <gst/netbuffer/gstnetbuffer.h>
+#include <gst/net/gstnetaddressmeta.h>
#include "gstudpsrc.h"
#include "gstmultiudpsink.h"
static gboolean
plugin_init (GstPlugin * plugin)
{
-#ifdef G_OS_WIN32
- if (!gst_udp_net_utils_win32_wsa_startup (GST_OBJECT (plugin)))
- return FALSE;
-#endif
-
- /* register type of the netbuffer so that we can use it from multiple threads
- * right away. Note that the plugin loading is always serialized */
- gst_netbuffer_get_type ();
+ /* register info of the netaddress metadata so that we can use it from
+ * multiple threads right away. Note that the plugin loading is always
+ * serialized */
+ gst_net_address_meta_get_info ();
if (!gst_element_register (plugin, "udpsink", GST_RANK_NONE,
GST_TYPE_UDPSINK))
#include "config.h"
#endif
-#include <errno.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <memory.h>
-
#include <gst/gst.h>
+#include <string.h>
#include "gstudpnetutils.h"
-/* EAI_ADDRFAMILY was obsoleted in BSD at some point */
-#ifndef EAI_ADDRFAMILY
-#define EAI_ADDRFAMILY 1
-#endif
-
-#ifdef G_OS_WIN32
-
gboolean
-gst_udp_net_utils_win32_wsa_startup (GstObject * obj)
-{
- WSADATA w;
- int error;
-
- error = WSAStartup (0x0202, &w);
-
- if (error) {
- GST_WARNING_OBJECT (obj, "WSAStartup error: %d", error);
- return FALSE;
- }
-
- if (w.wVersion != 0x0202) {
- WSACleanup ();
- GST_WARNING_OBJECT (obj, "Winsock version wrong : 0x%x", w.wVersion);
- return FALSE;
- }
-
- return TRUE;
-}
-
-#endif
-
-int
-gst_udp_get_sockaddr_length (struct sockaddr_storage *addr)
-{
- /* MacOS is picky about passing precisely the correct length,
- * so we calculate it here for the given socket type.
- */
- switch (addr->ss_family) {
- case AF_INET:
- return sizeof (struct sockaddr_in);
- case AF_INET6:
- return sizeof (struct sockaddr_in6);
- default:
- /* don't know, Screw MacOS and use the full length */
- return sizeof (*addr);
- }
-}
-
-int
-gst_udp_get_addr (const char *hostname, int port, struct sockaddr_storage *addr)
-{
- struct addrinfo hints, *res = NULL, *nres;
- char service[NI_MAXSERV];
- int ret;
-
- memset (&hints, 0, sizeof (hints));
- hints.ai_family = AF_UNSPEC;
- hints.ai_socktype = SOCK_DGRAM;
- g_snprintf (service, sizeof (service) - 1, "%d", port);
- service[sizeof (service) - 1] = '\0';
-
- if ((ret = getaddrinfo (hostname, (port == -1) ? NULL : service, &hints,
- &res)) < 0) {
- goto beach;
- }
-
- nres = res;
- while (nres) {
- if (nres->ai_family == AF_INET || nres->ai_family == AF_INET6)
- break;
- nres = nres->ai_next;
- }
-
- if (nres) {
- memcpy (addr, nres->ai_addr, nres->ai_addrlen);
- } else {
- ret = EAI_ADDRFAMILY;
- }
-
- freeaddrinfo (res);
-beach:
- return ret;
-}
-
-int
-gst_udp_set_loop (int sockfd, guint16 ss_family, gboolean loop)
-{
- int ret = -1;
- int l = (loop == FALSE) ? 0 : 1;
-
- switch (ss_family) {
- case AF_INET:
- {
- ret = setsockopt (sockfd, IPPROTO_IP, IP_MULTICAST_LOOP, &l, sizeof (l));
- if (ret < 0)
- return ret;
-
- break;
- }
- case AF_INET6:
- {
- ret =
- setsockopt (sockfd, IPPROTO_IPV6, IPV6_MULTICAST_LOOP, &l,
- sizeof (l));
- if (ret < 0)
- return ret;
-
- break;
- }
- default:
-#ifdef G_OS_WIN32
- WSASetLastError (WSAEAFNOSUPPORT);
-#else
- errno = EAFNOSUPPORT;
-#endif
- }
-
- return ret;
-}
-
-int
-gst_udp_set_ttl (int sockfd, guint16 ss_family, int ttl, gboolean is_multicast)
-{
- int optname = -1;
- int ret = -1;
-
- switch (ss_family) {
- case AF_INET:
- {
- optname = (is_multicast == TRUE) ? IP_MULTICAST_TTL : IP_TTL;
- ret = setsockopt (sockfd, IPPROTO_IP, optname, &ttl, sizeof (ttl));
- if (ret < 0)
- return ret;
- break;
- }
- case AF_INET6:
- {
- optname =
- (is_multicast == TRUE) ? IPV6_MULTICAST_HOPS : IPV6_UNICAST_HOPS;
- ret = setsockopt (sockfd, IPPROTO_IPV6, optname, &ttl, sizeof (ttl));
- if (ret < 0)
- return ret;
-
- /* When using IPV4 address with IPV6 socket, both TTL values
- must be set in order to actually use the given value.
- Has no effect when IPV6 address is used. */
- optname = (is_multicast == TRUE) ? IP_MULTICAST_TTL : IP_TTL;
- ret = setsockopt (sockfd, IPPROTO_IP, optname, &ttl, sizeof (ttl));
- if (ret < 0)
- return ret;
- break;
- }
- default:
-#ifdef G_OS_WIN32
- WSASetLastError (WSAEAFNOSUPPORT);
-#else
- errno = EAFNOSUPPORT;
-#endif
- }
- return ret;
-}
-
-/* FIXME: Add interface selection for windows hosts. */
-int
-gst_udp_join_group (int sockfd, struct sockaddr_storage *addr, gchar * iface)
-{
- int ret = -1;
-
- switch (addr->ss_family) {
- case AF_INET:
- {
-#ifdef HAVE_IP_MREQN
- struct ip_mreqn mreq4;
-#else
- struct ip_mreq mreq4;
-#endif
-
- memset (&mreq4, 0, sizeof (mreq4));
- mreq4.imr_multiaddr.s_addr =
- ((struct sockaddr_in *) addr)->sin_addr.s_addr;
-#ifdef HAVE_IP_MREQN
- if (iface)
- mreq4.imr_ifindex = if_nametoindex (iface);
- else
- mreq4.imr_ifindex = 0; /* Pick any. */
-#else
- mreq4.imr_interface.s_addr = INADDR_ANY;
-#endif
-
- if ((ret =
- setsockopt (sockfd, IPPROTO_IP, IP_ADD_MEMBERSHIP,
- (const void *) &mreq4, sizeof (mreq4))) < 0)
- return ret;
-
- break;
- }
- case AF_INET6:
- {
- struct ipv6_mreq mreq6;
-
- memset (&mreq6, 0, sizeof (mreq6));
- memcpy (&mreq6.ipv6mr_multiaddr,
- &(((struct sockaddr_in6 *) addr)->sin6_addr),
- sizeof (struct in6_addr));
- mreq6.ipv6mr_interface = 0;
-#if !defined(G_OS_WIN32)
- if (iface)
- mreq6.ipv6mr_interface = if_nametoindex (iface);
-#endif
-
- if ((ret =
- setsockopt (sockfd, IPPROTO_IPV6, IPV6_JOIN_GROUP,
- (const void *) &mreq6, sizeof (mreq6))) < 0)
- return ret;
-
- break;
- }
- default:
-#ifdef G_OS_WIN32
- WSASetLastError (WSAEAFNOSUPPORT);
-#else
- errno = EAFNOSUPPORT;
-#endif
- }
- return ret;
-}
-
-int
-gst_udp_leave_group (int sockfd, struct sockaddr_storage *addr)
-{
- int ret = -1;
-
- switch (addr->ss_family) {
- case AF_INET:
- {
- struct ip_mreq mreq4;
-
- memset (&mreq4, 0, sizeof (mreq4));
- mreq4.imr_multiaddr.s_addr =
- ((struct sockaddr_in *) addr)->sin_addr.s_addr;
- mreq4.imr_interface.s_addr = INADDR_ANY;
-
- if ((ret =
- setsockopt (sockfd, IPPROTO_IP, IP_DROP_MEMBERSHIP,
- (const void *) &mreq4, sizeof (mreq4))) < 0)
- return ret;
- }
- break;
-
- case AF_INET6:
- {
- struct ipv6_mreq mreq6;
-
- memset (&mreq6, 0, sizeof (mreq6));
- memcpy (&mreq6.ipv6mr_multiaddr,
- &(((struct sockaddr_in6 *) addr)->sin6_addr),
- sizeof (struct in6_addr));
- mreq6.ipv6mr_interface = 0;
-
- if ((ret =
- setsockopt (sockfd, IPPROTO_IPV6, IPV6_LEAVE_GROUP,
- (const void *) &mreq6, sizeof (mreq6))) < 0)
- return ret;
- }
- break;
-
- default:
-#ifdef G_OS_WIN32
- WSASetLastError (WSAEAFNOSUPPORT);
-#else
- errno = EAFNOSUPPORT;
-#endif
- }
-
- return ret;
-}
-
-int
-gst_udp_is_multicast (struct sockaddr_storage *addr)
-{
- int ret = -1;
-
- switch (addr->ss_family) {
- case AF_INET:
- {
- struct sockaddr_in *addr4 = (struct sockaddr_in *) addr;
-
- ret = IN_MULTICAST (g_ntohl (addr4->sin_addr.s_addr));
- }
- break;
-
- case AF_INET6:
- {
- struct sockaddr_in6 *addr6 = (struct sockaddr_in6 *) addr;
-
- ret = IN6_IS_ADDR_MULTICAST (&addr6->sin6_addr);
- }
- break;
-
- default:
-#ifdef G_OS_WIN32
- WSASetLastError (WSAEAFNOSUPPORT);
-#else
- errno = EAFNOSUPPORT;
-#endif
- }
-
- return ret;
-}
-
-void
-gst_udp_uri_init (GstUDPUri * uri, const gchar * host, gint port)
-{
- uri->host = NULL;
- uri->port = -1;
- gst_udp_uri_update (uri, host, port);
-}
-
-int
-gst_udp_uri_update (GstUDPUri * uri, const gchar * host, gint port)
-{
- if (host) {
- g_free (uri->host);
- uri->host = g_strdup (host);
- if (strchr (host, ':'))
- uri->is_ipv6 = TRUE;
- else
- uri->is_ipv6 = FALSE;
- }
- if (port != -1)
- uri->port = port;
-
- return 0;
-}
-
-int
-gst_udp_parse_uri (const gchar * uristr, GstUDPUri * uri)
+gst_udp_parse_uri (const gchar * uristr, gchar ** host, guint16 * port)
{
gchar *protocol, *location_start;
gchar *location, *location_end;
if (location_end == NULL)
goto wrong_address;
- uri->is_ipv6 = TRUE;
- g_free (uri->host);
- uri->host = g_strndup (location + 1, location_end - location - 1);
+ *host = g_strndup (location + 1, location_end - location - 1);
colptr = strrchr (location_end, ':');
} else {
GST_DEBUG ("parse IPV4 address '%s'", location);
- uri->is_ipv6 = FALSE;
colptr = strrchr (location, ':');
- g_free (uri->host);
if (colptr != NULL) {
- uri->host = g_strndup (location, colptr - location);
+ *host = g_strndup (location, colptr - location);
} else {
- uri->host = g_strdup (location);
+ *host = g_strdup (location);
}
}
- GST_DEBUG ("host set to '%s'", uri->host);
+ GST_DEBUG ("host set to '%s'", *host);
if (colptr != NULL) {
- uri->port = atoi (colptr + 1);
+ *port = g_ascii_strtoll (colptr + 1, NULL, 10);
+ } else {
+ *port = 0;
}
g_free (location_start);
- return 0;
+ return TRUE;
/* ERRORS */
no_protocol:
{
GST_ERROR ("error parsing uri %s: no protocol", uristr);
- return -1;
+ return FALSE;
}
wrong_protocol:
{
GST_ERROR ("error parsing uri %s: wrong protocol (%s != udp)", uristr,
protocol);
g_free (protocol);
- return -1;
+ return FALSE;
}
wrong_address:
{
GST_ERROR ("error parsing uri %s", uristr);
g_free (location);
- return -1;
- }
-}
-
-gchar *
-gst_udp_uri_string (GstUDPUri * uri)
-{
- gchar *result;
-
- if (uri->is_ipv6) {
- result = g_strdup_printf ("udp://[%s]:%d", uri->host, uri->port);
- } else {
- result = g_strdup_printf ("udp://%s:%d", uri->host, uri->port);
+ return FALSE;
}
- return result;
-}
-
-void
-gst_udp_uri_free (GstUDPUri * uri)
-{
- g_free (uri->host);
- uri->host = NULL;
- uri->port = -1;
}
* Boston, MA 02111-1307, USA.
*/
+#include <gst/gst.h>
+
#ifndef __GST_UDP_NET_UTILS_H__
#define __GST_UDP_NET_UTILS_H__
-#include <sys/types.h>
-
-/* Needed for G_OS_XXXX */
-#include <glib.h>
-
-#ifdef G_OS_WIN32
-/* ws2_32.dll has getaddrinfo and freeaddrinfo on Windows XP and later.
- * minwg32 headers check WINVER before allowing the use of these */
-#ifndef WINVER
-#define WINVER 0x0501
-#endif
-#include <winsock2.h>
-#include <ws2tcpip.h>
-#ifndef socklen_t
-#define socklen_t int
-#endif
-
-/* Needed for GstObject and GST_WARNING_OBJECT */
-#include <gst/gstobject.h>
-#include <gst/gstinfo.h>
-
-#else
-#include <sys/time.h>
-#include <sys/socket.h>
-#include <netinet/in.h>
-#include <net/if.h>
-#include <netdb.h>
-#include <sys/wait.h>
-#include <arpa/inet.h>
-#include <unistd.h>
-#include <sys/ioctl.h>
-#endif
-
-#include <fcntl.h>
-
-#ifdef G_OS_WIN32
-
-#define IOCTL_SOCKET ioctlsocket
-#define CLOSE_SOCKET(sock) closesocket(sock)
-#define setsockopt(sock,l,opt,val,len) setsockopt(sock,l,opt,(char *)(val),len)
-#define WSA_STARTUP(obj) gst_udp_net_utils_win32_wsa_startup(GST_OBJECT(obj))
-#define WSA_CLEANUP(obj) WSACleanup ()
-
-#else
-
-#define IOCTL_SOCKET ioctl
-#define CLOSE_SOCKET(sock) close(sock)
-#define setsockopt(sock,l,opt,val,len) setsockopt(sock,l,opt,(void *)(val),len)
-#define WSA_STARTUP(obj)
-#define WSA_CLEANUP(obj)
-
-#endif
-
-#ifdef G_OS_WIN32
-
-gboolean gst_udp_net_utils_win32_wsa_startup (GstObject * obj);
-
-#endif
-
-typedef struct {
- gchar *host;
- gint port;
- gboolean is_ipv6;
-} GstUDPUri;
-
-int gst_udp_get_sockaddr_length (struct sockaddr_storage *addr);
-
-int gst_udp_get_addr (const char *hostname, int port, struct sockaddr_storage *addr);
-int gst_udp_is_multicast (struct sockaddr_storage *addr);
-
-int gst_udp_set_loop (int sockfd, guint16 ss_family, gboolean loop);
-int gst_udp_set_ttl (int sockfd, guint16 ss_family, int ttl, gboolean is_multicast);
-
-/* multicast groups */
-int gst_udp_join_group (int sockfd, struct sockaddr_storage *addr,
- gchar *iface);
-int gst_udp_leave_group (int sockfd, struct sockaddr_storage *addr);
-
-/* uri handling */
-void gst_udp_uri_init (GstUDPUri *uri, const gchar *host, gint port);
-int gst_udp_uri_update (GstUDPUri *uri, const gchar *host, gint port);
-int gst_udp_parse_uri (const gchar *uristr, GstUDPUri *uri);
-gchar * gst_udp_uri_string (GstUDPUri *uri);
-void gst_udp_uri_free (GstUDPUri *uri);
+gboolean gst_udp_parse_uri (const gchar *uristr, gchar **host, guint16 *port);
#endif /* __GST_UDP_NET_UTILS_H__*/
/* GStreamer
* Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2012> Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
#endif
#include "gstudpsink.h"
-#include <stdio.h>
-#include <stdlib.h>
-#ifdef HAVE_UNISTD_H
-#include <unistd.h>
-#endif
-#include <errno.h>
-#include <string.h>
-
#define UDP_DEFAULT_HOST "localhost"
#define UDP_DEFAULT_PORT 4951
/* FILL ME */
};
-static void gst_udpsink_base_init (gpointer g_class);
-static void gst_udpsink_class_init (GstUDPSink * klass);
-static void gst_udpsink_init (GstUDPSink * udpsink);
static void gst_udpsink_finalize (GstUDPSink * udpsink);
static void gst_udpsink_uri_handler_init (gpointer g_iface,
static void gst_udpsink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstElementClass *parent_class = NULL;
-
/*static guint gst_udpsink_signals[LAST_SIGNAL] = { 0 }; */
-
-GType
-gst_udpsink_get_type (void)
-{
- static GType udpsink_type = 0;
-
- if (!udpsink_type) {
- static const GTypeInfo udpsink_info = {
- sizeof (GstUDPSinkClass),
- gst_udpsink_base_init,
- NULL,
- (GClassInitFunc) gst_udpsink_class_init,
- NULL,
- NULL,
- sizeof (GstUDPSink),
- 0,
- (GInstanceInitFunc) gst_udpsink_init,
- NULL
- };
- static const GInterfaceInfo urihandler_info = {
- gst_udpsink_uri_handler_init,
- NULL,
- NULL
- };
-
- udpsink_type =
- g_type_register_static (GST_TYPE_MULTIUDPSINK, "GstUDPSink",
- &udpsink_info, 0);
-
- g_type_add_interface_static (udpsink_type, GST_TYPE_URI_HANDLER,
- &urihandler_info);
-
- }
- return udpsink_type;
-}
+#define gst_udpsink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstUDPSink, gst_udpsink, GST_TYPE_MULTIUDPSINK,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_udpsink_uri_handler_init));
static void
-gst_udpsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "UDP packet sender",
- "Sink/Network",
- "Send data over the network via UDP", "Wim Taymans <wim@fluendo.com>");
-}
-
-static void
-gst_udpsink_class_init (GstUDPSink * klass)
+gst_udpsink_class_init (GstUDPSinkClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
-
- parent_class = g_type_class_peek_parent (klass);
+ gstelement_class = (GstElementClass *) klass;
gobject_class->set_property = gst_udpsink_set_property;
gobject_class->get_property = gst_udpsink_get_property;
g_param_spec_int ("port", "port", "The port to send the packets to",
0, 65535, UDP_DEFAULT_PORT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-}
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP", "Wim Taymans <wim@fluendo.com>");
+}
static void
gst_udpsink_init (GstUDPSink * udpsink)
{
- gst_udp_uri_init (&udpsink->uri, UDP_DEFAULT_HOST, UDP_DEFAULT_PORT);
+ udpsink->host = g_strdup (UDP_DEFAULT_HOST);
+ udpsink->port = UDP_DEFAULT_PORT;
+ udpsink->uri = g_strdup_printf ("udp://%s:%d", udpsink->host, udpsink->port);
- gst_multiudpsink_add (GST_MULTIUDPSINK (udpsink), udpsink->uri.host,
- udpsink->uri.port);
+ gst_multiudpsink_add (GST_MULTIUDPSINK (udpsink), udpsink->host,
+ udpsink->port);
}
static void
gst_udpsink_finalize (GstUDPSink * udpsink)
{
- gst_udp_uri_free (&udpsink->uri);
- g_free (udpsink->uristr);
+ g_free (udpsink->host);
+ udpsink->host = NULL;
+
+ g_free (udpsink->uri);
+ udpsink->uri = NULL;
G_OBJECT_CLASS (parent_class)->finalize ((GObject *) udpsink);
}
static gboolean
-gst_udpsink_set_uri (GstUDPSink * sink, const gchar * uri)
+gst_udpsink_set_uri (GstUDPSink * sink, const gchar * uri, GError ** error)
{
- gst_multiudpsink_remove (GST_MULTIUDPSINK (sink), sink->uri.host,
- sink->uri.port);
+ gst_multiudpsink_remove (GST_MULTIUDPSINK (sink), sink->host, sink->port);
- if (gst_udp_parse_uri (uri, &sink->uri) < 0)
+ if (!gst_udp_parse_uri (uri, &sink->host, &sink->port))
goto wrong_uri;
- gst_multiudpsink_add (GST_MULTIUDPSINK (sink), sink->uri.host,
- sink->uri.port);
+ g_free (sink->uri);
+ sink->uri = g_strdup (uri);
+
+ gst_multiudpsink_add (GST_MULTIUDPSINK (sink), sink->host, sink->port);
return TRUE;
{
GST_ELEMENT_ERROR (sink, RESOURCE, READ, (NULL),
("error parsing uri %s", uri));
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Could not parse UDP URI");
return FALSE;
}
}
/* remove old host */
gst_multiudpsink_remove (GST_MULTIUDPSINK (udpsink),
- udpsink->uri.host, udpsink->uri.port);
+ udpsink->host, udpsink->port);
switch (prop_id) {
case PROP_HOST:
const gchar *host;
host = g_value_get_string (value);
-
- if (host)
- gst_udp_uri_update (&udpsink->uri, host, -1);
- else
- gst_udp_uri_update (&udpsink->uri, UDP_DEFAULT_HOST, -1);
+ g_free (udpsink->host);
+ udpsink->host = g_strdup (host);
+ g_free (udpsink->uri);
+ udpsink->uri =
+ g_strdup_printf ("udp://%s:%d", udpsink->host, udpsink->port);
break;
}
case PROP_PORT:
- gst_udp_uri_update (&udpsink->uri, NULL, g_value_get_int (value));
+ udpsink->port = g_value_get_int (value);
+ g_free (udpsink->uri);
+ udpsink->uri =
+ g_strdup_printf ("udp://%s:%d", udpsink->host, udpsink->port);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
}
/* add new host */
gst_multiudpsink_add (GST_MULTIUDPSINK (udpsink),
- udpsink->uri.host, udpsink->uri.port);
+ udpsink->host, udpsink->port);
}
static void
switch (prop_id) {
case PROP_HOST:
- g_value_set_string (value, udpsink->uri.host);
+ g_value_set_string (value, udpsink->host);
break;
case PROP_PORT:
- g_value_set_int (value, udpsink->uri.port);
+ g_value_set_int (value, udpsink->port);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/*** GSTURIHANDLER INTERFACE *************************************************/
static GstURIType
-gst_udpsink_uri_get_type (void)
+gst_udpsink_uri_get_type (GType type)
{
return GST_URI_SINK;
}
-static gchar **
-gst_udpsink_uri_get_protocols (void)
+static const gchar *const *
+gst_udpsink_uri_get_protocols (GType type)
{
- static gchar *protocols[] = { (char *) "udp", NULL };
+ static const gchar *protocols[] = { "udp", NULL };
return protocols;
}
-static const gchar *
+static gchar *
gst_udpsink_uri_get_uri (GstURIHandler * handler)
{
GstUDPSink *sink = GST_UDPSINK (handler);
- g_free (sink->uristr);
- sink->uristr = gst_udp_uri_string (&sink->uri);
-
- return sink->uristr;
+ return g_strdup (sink->uri);
}
static gboolean
-gst_udpsink_uri_set_uri (GstURIHandler * handler, const gchar * uri)
+gst_udpsink_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
{
- gboolean ret;
- GstUDPSink *sink = GST_UDPSINK (handler);
-
- ret = gst_udpsink_set_uri (sink, uri);
-
- return ret;
+ return gst_udpsink_set_uri (GST_UDPSINK (handler), uri, error);
}
static void
struct _GstUDPSink {
GstMultiUDPSink parent;
- GstUDPUri uri;
- gchar *uristr;
+ gchar *host;
+ guint16 port;
+
+ gchar *uri;
};
struct _GstUDPSinkClass {
/* GStreamer
* Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
* Copyright (C) <2005> Nokia Corporation <kai.vehmanen@nokia.com>
+ * Copyright (C) <2012> Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
#endif
#include "gstudpsrc.h"
-#ifdef HAVE_UNISTD_H
-#include <unistd.h>
-#endif
-#include <stdlib.h>
-
-#if defined _MSC_VER && (_MSC_VER >= 1400)
-#include <io.h>
-#endif
-#include <gst/netbuffer/gstnetbuffer.h>
-
-#ifdef HAVE_FIONREAD_IN_SYS_FILIO
-#include <sys/filio.h>
-#endif
+#include <gst/net/gstnetaddressmeta.h>
GST_DEBUG_CATEGORY_STATIC (udpsrc_debug);
#define GST_CAT_DEFAULT (udpsrc_debug)
#define UDP_DEFAULT_MULTICAST_IFACE NULL
#define UDP_DEFAULT_URI "udp://"UDP_DEFAULT_MULTICAST_GROUP":"G_STRINGIFY(UDP_DEFAULT_PORT)
#define UDP_DEFAULT_CAPS NULL
-#define UDP_DEFAULT_SOCKFD -1
+#define UDP_DEFAULT_SOCKET NULL
#define UDP_DEFAULT_BUFFER_SIZE 0
#define UDP_DEFAULT_TIMEOUT 0
#define UDP_DEFAULT_SKIP_FIRST_BYTES 0
-#define UDP_DEFAULT_CLOSEFD TRUE
-#define UDP_DEFAULT_SOCK -1
+#define UDP_DEFAULT_CLOSE_SOCKET TRUE
+#define UDP_DEFAULT_USED_SOCKET NULL
#define UDP_DEFAULT_AUTO_MULTICAST TRUE
#define UDP_DEFAULT_REUSE TRUE
PROP_MULTICAST_IFACE,
PROP_URI,
PROP_CAPS,
- PROP_SOCKFD,
+ PROP_SOCKET,
PROP_BUFFER_SIZE,
PROP_TIMEOUT,
PROP_SKIP_FIRST_BYTES,
- PROP_CLOSEFD,
- PROP_SOCK,
+ PROP_CLOSE_SOCKET,
+ PROP_USED_SOCKET,
PROP_AUTO_MULTICAST,
PROP_REUSE,
PROP_LAST
};
-#define CLOSE_IF_REQUESTED(udpctx) \
-G_STMT_START { \
- if ((!udpctx->externalfd) || (udpctx->externalfd && udpctx->closefd)) { \
- CLOSE_SOCKET(udpctx->sock.fd); \
- if (udpctx->sock.fd == udpctx->sockfd) \
- udpctx->sockfd = UDP_DEFAULT_SOCKFD; \
- } \
- udpctx->sock.fd = UDP_DEFAULT_SOCK; \
-} G_STMT_END
-
static void gst_udpsrc_uri_handler_init (gpointer g_iface, gpointer iface_data);
-static GstCaps *gst_udpsrc_getcaps (GstBaseSrc * src);
+static GstCaps *gst_udpsrc_getcaps (GstBaseSrc * src, GstCaps * filter);
static GstFlowReturn gst_udpsrc_create (GstPushSrc * psrc, GstBuffer ** buf);
static void gst_udpsrc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static void
-_do_init (GType type)
-{
- static const GInterfaceInfo urihandler_info = {
- gst_udpsrc_uri_handler_init,
- NULL,
- NULL
- };
-
- g_type_add_interface_static (type, GST_TYPE_URI_HANDLER, &urihandler_info);
-
- GST_DEBUG_CATEGORY_INIT (udpsrc_debug, "udpsrc", 0, "UDP src");
-}
-
-GST_BOILERPLATE_FULL (GstUDPSrc, gst_udpsrc, GstPushSrc, GST_TYPE_PUSH_SRC,
- _do_init);
-
-static void
-gst_udpsrc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_template);
-
- gst_element_class_set_details_simple (element_class, "UDP packet receiver",
- "Source/Network",
- "Receive data over the network via UDP",
- "Wim Taymans <wim@fluendo.com>, "
- "Thijs Vermeir <thijs.vermeir@barco.com>");
-}
+#define gst_udpsrc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstUDPSrc, gst_udpsrc, GST_TYPE_PUSH_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_udpsrc_uri_handler_init));
static void
gst_udpsrc_class_init (GstUDPSrcClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseSrcClass *gstbasesrc_class;
GstPushSrcClass *gstpushsrc_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasesrc_class = (GstBaseSrcClass *) klass;
gstpushsrc_class = (GstPushSrcClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (udpsrc_debug, "udpsrc", 0, "UDP src");
+
gobject_class->set_property = gst_udpsrc_set_property;
gobject_class->get_property = gst_udpsrc_get_property;
gobject_class->finalize = gst_udpsrc_finalize;
g_param_spec_boxed ("caps", "Caps",
"The caps of the source pad", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_SOCKFD,
- g_param_spec_int ("sockfd", "Socket Handle",
- "Socket to use for UDP reception. (-1 == allocate)",
- -1, G_MAXINT, UDP_DEFAULT_SOCKFD,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SOCKET,
+ g_param_spec_object ("socket", "Socket",
+ "Socket to use for UDP reception. (NULL == allocate)",
+ G_TYPE_SOCKET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BUFFER_SIZE,
g_param_spec_int ("buffer-size", "Buffer Size",
"Size of the kernel receive buffer in bytes, 0=default", 0, G_MAXINT,
UDP_DEFAULT_BUFFER_SIZE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TIMEOUT,
g_param_spec_uint64 ("timeout", "Timeout",
- "Post a message after timeout microseconds (0 = disabled)", 0,
+ "Post a message after timeout nanoseconds (0 = disabled)", 0,
G_MAXUINT64, UDP_DEFAULT_TIMEOUT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass),
"Skip first bytes", "number of bytes to skip for each udp packet", 0,
G_MAXINT, UDP_DEFAULT_SKIP_FIRST_BYTES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_CLOSEFD,
- g_param_spec_boolean ("closefd", "Close sockfd",
- "Close sockfd if passed as property on state change",
- UDP_DEFAULT_CLOSEFD, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_SOCK,
- g_param_spec_int ("sock", "Socket Handle",
- "Socket currently in use for UDP reception. (-1 = no socket)",
- -1, G_MAXINT, UDP_DEFAULT_SOCK,
- G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CLOSE_SOCKET,
+ g_param_spec_boolean ("close-socket", "Close socket",
+ "Close socket if passed as property on state change",
+ UDP_DEFAULT_CLOSE_SOCKET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_USED_SOCKET,
+ g_param_spec_object ("used-socket", "Socket Handle",
+ "Socket currently in use for UDP reception. (NULL = no socket)",
+ G_TYPE_SOCKET, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_AUTO_MULTICAST,
g_param_spec_boolean ("auto-multicast", "Auto Multicast",
"Automatically join/leave multicast groups",
g_param_spec_boolean ("reuse", "Reuse", "Enable reuse of the port",
UDP_DEFAULT_REUSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet receiver",
+ "Source/Network",
+ "Receive data over the network via UDP",
+ "Wim Taymans <wim@fluendo.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>");
+
gstbasesrc_class->start = gst_udpsrc_start;
gstbasesrc_class->stop = gst_udpsrc_stop;
gstbasesrc_class->unlock = gst_udpsrc_unlock;
}
static void
-gst_udpsrc_init (GstUDPSrc * udpsrc, GstUDPSrcClass * g_class)
+gst_udpsrc_init (GstUDPSrc * udpsrc)
{
- WSA_STARTUP (udpsrc);
-
- gst_udp_uri_init (&udpsrc->uri, UDP_DEFAULT_MULTICAST_GROUP,
+ udpsrc->uri =
+ g_strdup_printf ("udp://%s:%u", UDP_DEFAULT_MULTICAST_GROUP,
UDP_DEFAULT_PORT);
- udpsrc->sockfd = UDP_DEFAULT_SOCKFD;
+ udpsrc->host = g_strdup (UDP_DEFAULT_MULTICAST_GROUP);
+ udpsrc->port = UDP_DEFAULT_PORT;
+ udpsrc->socket = UDP_DEFAULT_SOCKET;
udpsrc->multi_iface = g_strdup (UDP_DEFAULT_MULTICAST_IFACE);
udpsrc->buffer_size = UDP_DEFAULT_BUFFER_SIZE;
udpsrc->timeout = UDP_DEFAULT_TIMEOUT;
udpsrc->skip_first_bytes = UDP_DEFAULT_SKIP_FIRST_BYTES;
- udpsrc->closefd = UDP_DEFAULT_CLOSEFD;
- udpsrc->externalfd = (udpsrc->sockfd != -1);
+ udpsrc->close_socket = UDP_DEFAULT_CLOSE_SOCKET;
+ udpsrc->external_socket = (udpsrc->socket != NULL);
udpsrc->auto_multicast = UDP_DEFAULT_AUTO_MULTICAST;
- udpsrc->sock.fd = UDP_DEFAULT_SOCK;
+ udpsrc->used_socket = UDP_DEFAULT_USED_SOCKET;
udpsrc->reuse = UDP_DEFAULT_REUSE;
+ udpsrc->cancellable = g_cancellable_new ();
+
/* configure basesrc to be a live source */
gst_base_src_set_live (GST_BASE_SRC (udpsrc), TRUE);
/* make basesrc output a segment in time */
if (udpsrc->caps)
gst_caps_unref (udpsrc->caps);
+ udpsrc->caps = NULL;
g_free (udpsrc->multi_iface);
+ udpsrc->multi_iface = NULL;
+
+ g_free (udpsrc->uri);
+ udpsrc->uri = NULL;
+
+ g_free (udpsrc->host);
+ udpsrc->host = NULL;
- gst_udp_uri_free (&udpsrc->uri);
- g_free (udpsrc->uristr);
+ if (udpsrc->socket)
+ g_object_unref (udpsrc->socket);
+ udpsrc->socket = NULL;
- if (udpsrc->sockfd >= 0 && udpsrc->closefd)
- CLOSE_SOCKET (udpsrc->sockfd);
+ if (udpsrc->used_socket)
+ g_object_unref (udpsrc->used_socket);
+ udpsrc->used_socket = NULL;
- WSA_CLEANUP (object);
+ if (udpsrc->cancellable)
+ g_object_unref (udpsrc->cancellable);
+ udpsrc->cancellable = NULL;
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static GstCaps *
-gst_udpsrc_getcaps (GstBaseSrc * src)
+gst_udpsrc_getcaps (GstBaseSrc * src, GstCaps * filter)
{
GstUDPSrc *udpsrc;
udpsrc = GST_UDPSRC (src);
- if (udpsrc->caps)
- return gst_caps_ref (udpsrc->caps);
- else
- return gst_caps_new_any ();
-}
-
-/* read a message from the error queue */
-static void
-clear_error (GstUDPSrc * udpsrc)
-{
-#if defined (MSG_ERRQUEUE)
- struct msghdr cmsg;
- char cbuf[128];
- char msgbuf[CMSG_SPACE (128)];
- struct iovec iov;
-
- /* Flush ERRORS from fd so next poll will not return at once */
- /* No need for address : We look for local error */
- cmsg.msg_name = NULL;
- cmsg.msg_namelen = 0;
-
- /* IOV */
- memset (&cbuf, 0, sizeof (cbuf));
- iov.iov_base = cbuf;
- iov.iov_len = sizeof (cbuf);
- cmsg.msg_iov = &iov;
- cmsg.msg_iovlen = 1;
-
- /* msg_control */
- memset (&msgbuf, 0, sizeof (msgbuf));
- cmsg.msg_control = &msgbuf;
- cmsg.msg_controllen = sizeof (msgbuf);
-
- recvmsg (udpsrc->sock.fd, &cmsg, MSG_ERRQUEUE);
-#endif
+ if (udpsrc->caps) {
+ return (filter) ? gst_caps_intersect_full (filter, udpsrc->caps,
+ GST_CAPS_INTERSECT_FIRST) : gst_caps_ref (udpsrc->caps);
+ } else {
+ return (filter) ? gst_caps_ref (filter) : gst_caps_new_any ();
+ }
}
static GstFlowReturn
gst_udpsrc_create (GstPushSrc * psrc, GstBuffer ** buf)
{
GstUDPSrc *udpsrc;
- GstNetBuffer *outbuf;
- union gst_sockaddr
- {
- struct sockaddr sa;
- struct sockaddr_in sa_in;
- struct sockaddr_in6 sa_in6;
- struct sockaddr_storage sa_stor;
- } sa;
- socklen_t slen;
+ GstBuffer *outbuf;
+ GSocketAddress *saddr = NULL;
guint8 *pktdata;
gint pktsize;
-#ifdef G_OS_UNIX
- gint readsize;
-#elif defined G_OS_WIN32
- gulong readsize;
-#endif
- GstClockTime timeout;
- gint ret;
+ gsize offset;
+ gssize readsize;
+ gssize ret;
gboolean try_again;
+ GError *err = NULL;
udpsrc = GST_UDPSRC_CAST (psrc);
retry:
/* quick check, avoid going in select when we already have data */
- readsize = 0;
- if (G_UNLIKELY ((ret =
- IOCTL_SOCKET (udpsrc->sock.fd, FIONREAD, &readsize)) < 0))
- goto ioctl_failed;
-
+ readsize = g_socket_get_available_bytes (udpsrc->used_socket);
if (readsize > 0)
goto no_select;
- if (udpsrc->timeout > 0) {
- timeout = udpsrc->timeout * GST_USECOND;
- } else {
- timeout = GST_CLOCK_TIME_NONE;
- }
-
do {
try_again = FALSE;
GST_LOG_OBJECT (udpsrc, "doing select, timeout %" G_GUINT64_FORMAT,
udpsrc->timeout);
- ret = gst_poll_wait (udpsrc->fdset, timeout);
- GST_LOG_OBJECT (udpsrc, "select returned %d", ret);
- if (G_UNLIKELY (ret < 0)) {
- if (errno == EBUSY)
+ if (!g_socket_condition_wait (udpsrc->used_socket, G_IO_IN | G_IO_PRI,
+ udpsrc->cancellable, &err)) {
+ if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY)
+ || g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
goto stopped;
-#ifdef G_OS_WIN32
- if (WSAGetLastError () != WSAEINTR)
- goto select_error;
-#else
- if (errno != EAGAIN && errno != EINTR)
+ } else if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_TIMED_OUT)) {
+ /* timeout, post element message */
+ gst_element_post_message (GST_ELEMENT_CAST (udpsrc),
+ gst_message_new_element (GST_OBJECT_CAST (udpsrc),
+ gst_structure_new ("GstUDPSrcTimeout",
+ "timeout", G_TYPE_UINT64, udpsrc->timeout, NULL)));
+ } else {
goto select_error;
-#endif
- try_again = TRUE;
- } else if (G_UNLIKELY (ret == 0)) {
- /* timeout, post element message */
- gst_element_post_message (GST_ELEMENT_CAST (udpsrc),
- gst_message_new_element (GST_OBJECT_CAST (udpsrc),
- gst_structure_new ("GstUDPSrcTimeout",
- "timeout", G_TYPE_UINT64, udpsrc->timeout, NULL)));
+ }
+
try_again = TRUE;
}
} while (G_UNLIKELY (try_again));
/* ask how much is available for reading on the socket, this should be exactly
* one UDP packet. We will check the return value, though, because in some
* case it can return 0 and we don't want a 0 sized buffer. */
- readsize = 0;
- if (G_UNLIKELY ((ret =
- IOCTL_SOCKET (udpsrc->sock.fd, FIONREAD, &readsize)) < 0))
- goto ioctl_failed;
+ readsize = g_socket_get_available_bytes (udpsrc->used_socket);
+ if (G_UNLIKELY (readsize < 0))
+ goto get_available_error;
/* If we get here and the readsize is zero, then either select was woken up
* by activity that is not a read, or a poll error occurred, or a UDP packet
if (G_UNLIKELY (!readsize)) {
/* try to read a packet (and it will be ignored),
* in case a packet with no data arrived */
- slen = sizeof (sa);
- recvfrom (udpsrc->sock.fd, (char *) &slen, 0, 0, &sa.sa, &slen);
- /* clear any error, in case a poll error occurred */
- clear_error (udpsrc);
+ pktdata = NULL;
+ pktsize = 0;
+ ret =
+ g_socket_receive_from (udpsrc->used_socket, NULL, (gchar *) pktdata,
+ pktsize, udpsrc->cancellable, &err);
+ if (G_UNLIKELY (ret < 0))
+ goto receive_error;
/* poll again */
goto retry;
pktdata = g_malloc (readsize);
pktsize = readsize;
+ offset = 0;
- while (TRUE) {
- slen = sizeof (sa);
-#ifdef G_OS_WIN32
- ret = recvfrom (udpsrc->sock.fd, (char *) pktdata, pktsize, 0, &sa.sa,
- &slen);
-#else
- ret = recvfrom (udpsrc->sock.fd, pktdata, pktsize, 0, &sa.sa, &slen);
-#endif
- if (G_UNLIKELY (ret < 0)) {
-#ifdef G_OS_WIN32
- /* WSAECONNRESET for a UDP socket means that a packet sent with udpsink
- * generated a "port unreachable" ICMP response. We ignore that and try
- * again. */
- if (WSAGetLastError () == WSAECONNRESET) {
- g_free (pktdata);
- pktdata = NULL;
- goto retry;
- }
- if (WSAGetLastError () != WSAEINTR)
- goto receive_error;
-#else
- if (errno != EAGAIN && errno != EINTR)
- goto receive_error;
-#endif
- } else
- break;
- }
+ if (saddr)
+ g_object_unref (saddr);
+ saddr = NULL;
+
+ ret =
+ g_socket_receive_from (udpsrc->used_socket, &saddr, (gchar *) pktdata,
+ pktsize, udpsrc->cancellable, &err);
- /* special case buffer so receivers can also track the address */
- outbuf = gst_netbuffer_new ();
- GST_BUFFER_MALLOCDATA (outbuf) = pktdata;
+ if (G_UNLIKELY (ret < 0))
+ goto receive_error;
/* patch pktdata and len when stripping off the headers */
if (G_UNLIKELY (udpsrc->skip_first_bytes != 0)) {
if (G_UNLIKELY (readsize < udpsrc->skip_first_bytes))
goto skip_error;
- pktdata += udpsrc->skip_first_bytes;
+ offset += udpsrc->skip_first_bytes;
ret -= udpsrc->skip_first_bytes;
}
- GST_BUFFER_DATA (outbuf) = pktdata;
- GST_BUFFER_SIZE (outbuf) = ret;
- switch (sa.sa.sa_family) {
- case AF_INET:
- {
- gst_netaddress_set_ip4_address (&outbuf->from, sa.sa_in.sin_addr.s_addr,
- sa.sa_in.sin_port);
- }
- break;
- case AF_INET6:
- {
- guint8 ip6[16];
+ outbuf = gst_buffer_new ();
+ gst_buffer_take_memory (outbuf, -1,
+ gst_memory_new_wrapped (0, pktdata, g_free, pktsize, offset, ret));
- memcpy (ip6, &sa.sa_in6.sin6_addr, sizeof (ip6));
- gst_netaddress_set_ip6_address (&outbuf->from, ip6, sa.sa_in6.sin6_port);
- }
- break;
- default:
-#ifdef G_OS_WIN32
- WSASetLastError (WSAEAFNOSUPPORT);
-#else
- errno = EAFNOSUPPORT;
-#endif
- goto receive_error;
+ /* use buffer metadata so receivers can also track the address */
+ if (saddr) {
+ gst_buffer_add_net_address_meta (outbuf, saddr);
+ g_object_unref (saddr);
}
+ saddr = NULL;
+
GST_LOG_OBJECT (udpsrc, "read %d bytes", (int) readsize);
*buf = GST_BUFFER_CAST (outbuf);
select_error:
{
GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
- ("select error %d: %s (%d)", ret, g_strerror (errno), errno));
+ ("select error: %s", err->message));
+ g_clear_error (&err);
return GST_FLOW_ERROR;
}
stopped:
{
GST_DEBUG ("stop called");
- return GST_FLOW_WRONG_STATE;
+ return GST_FLOW_FLUSHING;
}
-ioctl_failed:
+get_available_error:
{
GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
- ("ioctl failed %d: %s (%d)", ret, g_strerror (errno), errno));
+ ("get available bytes failed"));
return GST_FLOW_ERROR;
}
receive_error:
{
g_free (pktdata);
-#ifdef G_OS_WIN32
- GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
- ("receive error %d (WSA error: %d)", ret, WSAGetLastError ()));
-#else
- GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
- ("receive error %d: %s (%d)", ret, g_strerror (errno), errno));
-#endif
- return GST_FLOW_ERROR;
+
+ if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY) ||
+ g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
+ g_clear_error (&err);
+ return GST_FLOW_FLUSHING;
+ } else {
+ GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
+ ("receive error %d: %s", ret, err->message));
+ g_clear_error (&err);
+ return GST_FLOW_ERROR;
+ }
}
skip_error:
{
}
static gboolean
-gst_udpsrc_set_uri (GstUDPSrc * src, const gchar * uri)
+gst_udpsrc_set_uri (GstUDPSrc * src, const gchar * uri, GError ** error)
{
- if (gst_udp_parse_uri (uri, &src->uri) < 0)
+ gchar *host;
+ guint16 port;
+
+ if (!gst_udp_parse_uri (uri, &host, &port))
goto wrong_uri;
- if (src->uri.port == -1)
- src->uri.port = UDP_DEFAULT_PORT;
+ if (port == -1)
+ port = UDP_DEFAULT_PORT;
+
+ g_free (src->host);
+ src->host = host;
+ src->port = port;
+
+ g_free (src->uri);
+ src->uri = g_strdup (uri);
return TRUE;
{
GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
("error parsing uri %s", uri));
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Could not parse UDP URI");
return FALSE;
}
}
udpsrc->buffer_size = g_value_get_int (value);
break;
case PROP_PORT:
- gst_udp_uri_update (&udpsrc->uri, NULL, g_value_get_int (value));
+ udpsrc->port = g_value_get_int (value);
+ g_free (udpsrc->uri);
+ udpsrc->uri = g_strdup_printf ("udp://%s:%u", udpsrc->host, udpsrc->port);
break;
case PROP_MULTICAST_GROUP:
{
const gchar *group;
+ g_free (udpsrc->host);
if ((group = g_value_get_string (value)))
- gst_udp_uri_update (&udpsrc->uri, group, -1);
+ udpsrc->host = g_strdup (group);
else
- gst_udp_uri_update (&udpsrc->uri, UDP_DEFAULT_MULTICAST_GROUP, -1);
+ udpsrc->host = g_strdup (UDP_DEFAULT_MULTICAST_GROUP);
+
+ g_free (udpsrc->uri);
+ udpsrc->uri = g_strdup_printf ("udp://%s:%u", udpsrc->host, udpsrc->port);
break;
}
case PROP_MULTICAST_IFACE:
udpsrc->multi_iface = g_value_dup_string (value);
break;
case PROP_URI:
- gst_udpsrc_set_uri (udpsrc, g_value_get_string (value));
+ gst_udpsrc_set_uri (udpsrc, g_value_get_string (value), NULL);
break;
case PROP_CAPS:
{
gst_pad_set_caps (GST_BASE_SRC (udpsrc)->srcpad, new_caps);
break;
}
- case PROP_SOCKFD:
- if (udpsrc->sockfd >= 0 && udpsrc->sockfd != udpsrc->sock.fd &&
- udpsrc->closefd)
- CLOSE_SOCKET (udpsrc->sockfd);
- udpsrc->sockfd = g_value_get_int (value);
- GST_DEBUG ("setting SOCKFD to %d", udpsrc->sockfd);
+ case PROP_SOCKET:
+ if (udpsrc->socket != NULL && udpsrc->socket != udpsrc->used_socket &&
+ udpsrc->close_socket) {
+ GError *err = NULL;
+
+ if (!g_socket_close (udpsrc->socket, &err)) {
+ GST_ERROR ("failed to close socket %p: %s", udpsrc->socket,
+ err->message);
+ g_clear_error (&err);
+ }
+ }
+ if (udpsrc->socket)
+ g_object_unref (udpsrc->socket);
+ udpsrc->socket = g_value_dup_object (value);
+ GST_DEBUG ("setting socket to %p", udpsrc->socket);
break;
case PROP_TIMEOUT:
udpsrc->timeout = g_value_get_uint64 (value);
case PROP_SKIP_FIRST_BYTES:
udpsrc->skip_first_bytes = g_value_get_int (value);
break;
- case PROP_CLOSEFD:
- udpsrc->closefd = g_value_get_boolean (value);
+ case PROP_CLOSE_SOCKET:
+ udpsrc->close_socket = g_value_get_boolean (value);
break;
case PROP_AUTO_MULTICAST:
udpsrc->auto_multicast = g_value_get_boolean (value);
g_value_set_int (value, udpsrc->buffer_size);
break;
case PROP_PORT:
- g_value_set_int (value, udpsrc->uri.port);
+ g_value_set_int (value, udpsrc->port);
break;
case PROP_MULTICAST_GROUP:
- g_value_set_string (value, udpsrc->uri.host);
+ g_value_set_string (value, udpsrc->host);
break;
case PROP_MULTICAST_IFACE:
g_value_set_string (value, udpsrc->multi_iface);
break;
case PROP_URI:
- g_value_take_string (value, gst_udp_uri_string (&udpsrc->uri));
+ g_value_set_string (value, udpsrc->uri);
break;
case PROP_CAPS:
gst_value_set_caps (value, udpsrc->caps);
break;
- case PROP_SOCKFD:
- g_value_set_int (value, udpsrc->sockfd);
+ case PROP_SOCKET:
+ g_value_set_object (value, udpsrc->socket);
break;
case PROP_TIMEOUT:
g_value_set_uint64 (value, udpsrc->timeout);
case PROP_SKIP_FIRST_BYTES:
g_value_set_int (value, udpsrc->skip_first_bytes);
break;
- case PROP_CLOSEFD:
- g_value_set_boolean (value, udpsrc->closefd);
+ case PROP_CLOSE_SOCKET:
+ g_value_set_boolean (value, udpsrc->close_socket);
break;
- case PROP_SOCK:
- g_value_set_int (value, udpsrc->sock.fd);
+ case PROP_USED_SOCKET:
+ g_value_set_object (value, udpsrc->used_socket);
break;
case PROP_AUTO_MULTICAST:
g_value_set_boolean (value, udpsrc->auto_multicast);
static gboolean
gst_udpsrc_start (GstBaseSrc * bsrc)
{
- guint bc_val;
- guint err_val;
- gint reuse;
- int port;
GstUDPSrc *src;
- gint ret;
- int rcvsize;
- struct sockaddr_storage bind_address;
- socklen_t len;
+ GInetAddress *addr, *bind_addr;
+ GSocketAddress *bind_saddr;
+ GResolver *resolver;
+ GError *err = NULL;
+
src = GST_UDPSRC (bsrc);
- if (src->sockfd == -1) {
+ if (src->socket == NULL) {
/* need to allocate a socket */
- GST_DEBUG_OBJECT (src, "allocating socket for %s:%d", src->uri.host,
- src->uri.port);
- if ((ret =
- gst_udp_get_addr (src->uri.host, src->uri.port, &src->myaddr)) < 0)
- goto getaddrinfo_error;
-
- if ((ret = socket (src->myaddr.ss_family, SOCK_DGRAM, IPPROTO_UDP)) < 0)
- goto no_socket;
+ GST_DEBUG_OBJECT (src, "allocating socket for %s:%d", src->host, src->port);
+
+ addr = g_inet_address_new_from_string (src->host);
+ if (!addr) {
+ GList *results;
+
+ resolver = g_resolver_get_default ();
+ results =
+ g_resolver_lookup_by_name (resolver, src->host, src->cancellable,
+ &err);
+ if (!results)
+ goto name_resolve;
+ addr = G_INET_ADDRESS (g_object_ref (results->data));
+
+ g_resolver_free_addresses (results);
+ g_object_unref (resolver);
+ }
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ gchar *ip = g_inet_address_to_string (addr);
- src->sock.fd = ret;
- src->externalfd = FALSE;
-
- GST_DEBUG_OBJECT (src, "got socket %d", src->sock.fd);
-
- GST_DEBUG_OBJECT (src, "setting reuse %d", src->reuse);
- reuse = src->reuse ? 1 : 0;
- if ((ret =
- setsockopt (src->sock.fd, SOL_SOCKET, SO_REUSEADDR, &reuse,
- sizeof (reuse))) < 0)
- goto setsockopt_error;
-
- GST_DEBUG_OBJECT (src, "binding on port %d", src->uri.port);
-
- /* Take a temporary copy of the address in case we need to fix it for bind */
- memcpy (&bind_address, &src->myaddr, sizeof (struct sockaddr_storage));
-
-#ifdef G_OS_WIN32
- /* Windows does not allow binding to a multicast group so fix source address */
- if (gst_udp_is_multicast (&src->myaddr)) {
- switch (((struct sockaddr *) &bind_address)->sa_family) {
- case AF_INET:
- ((struct sockaddr_in *) &bind_address)->sin_addr.s_addr =
- htonl (INADDR_ANY);
- break;
- case AF_INET6:
- ((struct sockaddr_in6 *) &bind_address)->sin6_addr = in6addr_any;
- break;
- default:
- break;
- }
+ GST_DEBUG_OBJECT (src, "IP address for host %s is %s", src->host, ip);
+ g_free (ip);
}
#endif
- len = gst_udp_get_sockaddr_length (&bind_address);
- if ((ret = bind (src->sock.fd, (struct sockaddr *) &bind_address, len)) < 0)
+ if ((src->used_socket =
+ g_socket_new (g_inet_address_get_family (addr),
+ G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP, &err)) == NULL)
+ goto no_socket;
+
+ src->external_socket = FALSE;
+
+ GST_DEBUG_OBJECT (src, "got socket %p", src->used_socket);
+
+ if (src->addr)
+ g_object_unref (src->addr);
+ src->addr =
+ G_INET_SOCKET_ADDRESS (g_inet_socket_address_new (addr, src->port));
+
+ GST_DEBUG_OBJECT (src, "binding on port %d", src->port);
+
+ if (g_inet_address_get_is_multicast (addr))
+ bind_addr = g_inet_address_new_any (g_inet_address_get_family (addr));
+ else
+ bind_addr = G_INET_ADDRESS (g_object_ref (addr));
+
+ g_object_unref (addr);
+
+ bind_saddr = g_inet_socket_address_new (bind_addr, src->port);
+ g_object_unref (bind_addr);
+ if (!g_socket_bind (src->used_socket, bind_saddr, src->reuse, &err))
goto bind_error;
- if (!gst_udp_is_multicast (&src->myaddr)) {
- len = sizeof (src->myaddr);
- if ((ret = getsockname (src->sock.fd, (struct sockaddr *) &src->myaddr,
- &len)) < 0)
- goto getsockname_error;
- }
+ g_object_unref (bind_saddr);
} else {
- GST_DEBUG_OBJECT (src, "using provided socket %d", src->sockfd);
+ GST_DEBUG_OBJECT (src, "using provided socket %p", src->socket);
/* we use the configured socket, try to get some info about it */
- len = sizeof (src->myaddr);
- if ((ret =
- getsockname (src->sockfd, (struct sockaddr *) &src->myaddr,
- &len)) < 0)
+ src->used_socket = G_SOCKET (g_object_ref (src->socket));
+ src->external_socket = TRUE;
+
+ if (src->addr)
+ g_object_unref (src->addr);
+ src->addr =
+ G_INET_SOCKET_ADDRESS (g_socket_get_local_address (src->used_socket,
+ &err));
+ if (!src->addr)
goto getsockname_error;
-
- src->sock.fd = src->sockfd;
- src->externalfd = TRUE;
}
- len = sizeof (rcvsize);
- if (src->buffer_size != 0) {
- rcvsize = src->buffer_size;
+ if (src->timeout)
+ g_socket_set_timeout (src->used_socket, src->timeout / GST_SECOND);
- GST_DEBUG_OBJECT (src, "setting udp buffer of %d bytes", rcvsize);
- /* set buffer size, Note that on Linux this is typically limited to a
- * maximum of around 100K. Also a minimum of 128 bytes is required on
- * Linux. */
- ret =
- setsockopt (src->sock.fd, SOL_SOCKET, SO_RCVBUF, (void *) &rcvsize,
- len);
- if (ret != 0) {
- GST_ELEMENT_WARNING (src, RESOURCE, SETTINGS, (NULL),
- ("Could not create a buffer of requested %d bytes, %d: %s (%d)",
- rcvsize, ret, g_strerror (errno), errno));
+#ifdef SO_RECVBUF
+ {
+ gint rcvsize, ret;
+
+ len = sizeof (rcvsize);
+ if (src->buffer_size != 0) {
+ rcvsize = src->buffer_size;
+
+ GST_DEBUG_OBJECT (src, "setting udp buffer of %d bytes", rcvsize);
+ /* set buffer size, Note that on Linux this is typically limited to a
+ * maximum of around 100K. Also a minimum of 128 bytes is required on
+ * Linux. */
+ ret =
+ setsockopt (g_socket_get_fd (src->used_socket), SOL_SOCKET, SO_RCVBUF,
+ (void *) &rcvsize, len);
+ if (ret != 0) {
+ GST_ELEMENT_WARNING (src, RESOURCE, SETTINGS, (NULL),
+ ("Could not create a buffer of requested %d bytes, %d: %s (%d)",
+ rcvsize, ret, g_strerror (errno), errno));
+ }
}
- }
-
- /* read the value of the receive buffer. Note that on linux this returns 2x the
- * value we set because the kernel allocates extra memory for metadata.
- * The default on Linux is about 100K (which is about 50K without metadata) */
- ret =
- getsockopt (src->sock.fd, SOL_SOCKET, SO_RCVBUF, (void *) &rcvsize, &len);
- if (ret == 0)
- GST_DEBUG_OBJECT (src, "have udp buffer of %d bytes", rcvsize);
- else
- GST_DEBUG_OBJECT (src, "could not get udp buffer size");
-
- bc_val = 1;
- if ((ret = setsockopt (src->sock.fd, SOL_SOCKET, SO_BROADCAST, &bc_val,
- sizeof (bc_val))) < 0) {
- GST_ELEMENT_WARNING (src, RESOURCE, SETTINGS, (NULL),
- ("could not configure socket for broadcast %d: %s (%d)", ret,
- g_strerror (errno), errno));
- }
- /* Accept ERRQUEUE to get and flush icmp errors */
- err_val = 1;
-#if defined (IP_RECVERR)
- if ((ret = setsockopt (src->sock.fd, IPPROTO_IP, IP_RECVERR, &err_val,
- sizeof (err_val))) < 0) {
- GST_ELEMENT_WARNING (src, RESOURCE, SETTINGS, (NULL),
- ("could not configure socket for IP_RECVERR %d: %s (%d)", ret,
- g_strerror (errno), errno));
+ /* read the value of the receive buffer. Note that on linux this returns 2x the
+ * value we set because the kernel allocates extra memory for metadata.
+ * The default on Linux is about 100K (which is about 50K without metadata) */
+ ret =
+ getsockopt (g_socket_get_fd (src->used_socket), SOL_SOCKET, SO_RCVBUF,
+ (void *) &rcvsize, &len);
+ if (ret == 0)
+ GST_DEBUG_OBJECT (src, "have udp buffer of %d bytes", rcvsize);
+ else
+ GST_DEBUG_OBJECT (src, "could not get udp buffer size");
}
#endif
- if (src->auto_multicast && gst_udp_is_multicast (&src->myaddr)) {
- GST_DEBUG_OBJECT (src, "joining multicast group %s", src->uri.host);
- ret = gst_udp_join_group (src->sock.fd, &src->myaddr, src->multi_iface);
- if (ret < 0)
+ g_socket_set_broadcast (src->used_socket, TRUE);
+
+ if (src->auto_multicast
+ &&
+ g_inet_address_get_is_multicast (g_inet_socket_address_get_address
+ (src->addr))) {
+ GST_DEBUG_OBJECT (src, "joining multicast group %s", src->host);
+ if (!g_socket_join_multicast_group (src->used_socket,
+ g_inet_socket_address_get_address (src->addr),
+ FALSE, src->multi_iface, &err))
goto membership;
}
/* NOTE: sockaddr_in.sin_port works for ipv4 and ipv6 because sin_port
* follows ss_family on both */
- port = g_ntohs (((struct sockaddr_in *) &src->myaddr)->sin_port);
- GST_DEBUG_OBJECT (src, "bound, on port %d", port);
- if (port != src->uri.port) {
- src->uri.port = port;
- GST_DEBUG_OBJECT (src, "notifying port %d", port);
- g_object_notify (G_OBJECT (src), "port");
- }
+ {
+ GInetSocketAddress *addr;
+ guint16 port;
- if ((src->fdset = gst_poll_new (TRUE)) == NULL)
- goto no_fdset;
+ addr =
+ G_INET_SOCKET_ADDRESS (g_socket_get_local_address (src->used_socket,
+ &err));
+ if (!addr)
+ goto getsockname_error;
- gst_poll_add_fd (src->fdset, &src->sock);
- gst_poll_fd_ctl_read (src->fdset, &src->sock, TRUE);
+ port = g_inet_socket_address_get_port (addr);
+ GST_DEBUG_OBJECT (src, "bound, on port %d", port);
+ if (port != src->port) {
+ src->port = port;
+ GST_DEBUG_OBJECT (src, "notifying port %d", port);
+ g_object_notify (G_OBJECT (src), "port");
+ }
+ g_object_unref (addr);
+ }
return TRUE;
/* ERRORS */
-getaddrinfo_error:
+name_resolve:
{
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
- ("getaddrinfo failed: %s (%d)", gai_strerror (ret), ret));
+ ("Name resolval failed: %s", err->message));
+ g_clear_error (&err);
+ g_object_unref (resolver);
return FALSE;
}
no_socket:
{
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
- ("no socket error %d: %s (%d)", ret, g_strerror (errno), errno));
- return FALSE;
- }
-setsockopt_error:
- {
- CLOSE_IF_REQUESTED (src);
- GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
- ("setsockopt failed %d: %s (%d)", ret, g_strerror (errno), errno));
+ ("no socket error: %s", err->message));
+ g_clear_error (&err);
+ g_object_unref (addr);
return FALSE;
}
bind_error:
{
- CLOSE_IF_REQUESTED (src);
+ gst_udpsrc_stop (GST_BASE_SRC (src));
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
- ("bind failed %d: %s (%d)", ret, g_strerror (errno), errno));
+ ("bind failed: %s", err->message));
+ g_clear_error (&err);
+ g_object_unref (bind_saddr);
return FALSE;
}
membership:
{
- CLOSE_IF_REQUESTED (src);
+ gst_udpsrc_stop (GST_BASE_SRC (src));
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
- ("could add membership %d: %s (%d)", ret, g_strerror (errno), errno));
+ ("could add membership: %s", err->message));
+ g_clear_error (&err);
return FALSE;
}
getsockname_error:
{
- CLOSE_IF_REQUESTED (src);
+ gst_udpsrc_stop (GST_BASE_SRC (src));
GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
- ("getsockname failed %d: %s (%d)", ret, g_strerror (errno), errno));
- return FALSE;
- }
-no_fdset:
- {
- CLOSE_IF_REQUESTED (src);
- GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
- ("could not create an fdset %d: %s (%d)", ret, g_strerror (errno),
- errno));
+ ("getsockname failed: %s", err->message));
+ g_clear_error (&err);
return FALSE;
}
}
src = GST_UDPSRC (bsrc);
GST_LOG_OBJECT (src, "Flushing");
- gst_poll_set_flushing (src->fdset, TRUE);
+ g_cancellable_cancel (src->cancellable);
return TRUE;
}
src = GST_UDPSRC (bsrc);
GST_LOG_OBJECT (src, "No longer flushing");
- gst_poll_set_flushing (src->fdset, FALSE);
+ g_cancellable_reset (src->cancellable);
return TRUE;
}
GST_DEBUG ("stopping, closing sockets");
- if (src->sock.fd >= 0) {
- if (src->auto_multicast && gst_udp_is_multicast (&src->myaddr)) {
- GST_DEBUG_OBJECT (src, "leaving multicast group %s", src->uri.host);
- gst_udp_leave_group (src->sock.fd, &src->myaddr);
+ if (src->used_socket) {
+ if (src->auto_multicast
+ &&
+ g_inet_address_get_is_multicast (g_inet_socket_address_get_address
+ (src->addr))) {
+ GError *err = NULL;
+
+ GST_DEBUG_OBJECT (src, "leaving multicast group %s", src->host);
+
+ if (!g_socket_leave_multicast_group (src->used_socket,
+ g_inet_socket_address_get_address (src->addr), FALSE,
+ src->multi_iface, NULL)) {
+ GST_ERROR_OBJECT (src, "Failed to leave multicast group: %s",
+ err->message);
+ g_clear_error (&err);
+ }
}
- CLOSE_IF_REQUESTED (src);
- }
- if (src->fdset) {
- gst_poll_free (src->fdset);
- src->fdset = NULL;
+ if (src->close_socket || !src->external_socket) {
+ GError *err = NULL;
+ if (!g_socket_close (src->used_socket, &err)) {
+ GST_ERROR_OBJECT (src, "Failed to close socket: %s", err->message);
+ g_clear_error (&err);
+ }
+ }
+
+ g_object_unref (src->used_socket);
+ src->used_socket = NULL;
+ g_object_unref (src->addr);
+ src->addr = NULL;
}
return TRUE;
/*** GSTURIHANDLER INTERFACE *************************************************/
static GstURIType
-gst_udpsrc_uri_get_type (void)
+gst_udpsrc_uri_get_type (GType type)
{
return GST_URI_SRC;
}
-static gchar **
-gst_udpsrc_uri_get_protocols (void)
+static const gchar *const *
+gst_udpsrc_uri_get_protocols (GType type)
{
- static gchar *protocols[] = { (char *) "udp", NULL };
+ static const gchar *protocols[] = { "udp", NULL };
return protocols;
}
-static const gchar *
+static gchar *
gst_udpsrc_uri_get_uri (GstURIHandler * handler)
{
GstUDPSrc *src = GST_UDPSRC (handler);
- g_free (src->uristr);
- src->uristr = gst_udp_uri_string (&src->uri);
-
- return src->uristr;
+ return g_strdup (src->uri);
}
static gboolean
-gst_udpsrc_uri_set_uri (GstURIHandler * handler, const gchar * uri)
+gst_udpsrc_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
{
- gboolean ret;
-
- GstUDPSrc *src = GST_UDPSRC (handler);
-
- ret = gst_udpsrc_set_uri (src, uri);
-
- return ret;
+ return gst_udpsrc_set_uri (GST_UDPSRC (handler), uri, error);
}
static void
#include <gst/gst.h>
#include <gst/base/gstpushsrc.h>
+#include <gio/gio.h>
G_BEGIN_DECLS
-#include <errno.h>
-#include <string.h>
-#include <sys/types.h>
-
#include "gstudpnetutils.h"
-
#include "gstudp.h"
#define GST_TYPE_UDPSRC \
GstPushSrc parent;
/* properties */
- GstUDPUri uri;
+ gchar *host;
+ gint port;
gchar *multi_iface;
gint ttl;
GstCaps *caps;
gint buffer_size;
guint64 timeout;
gint skip_first_bytes;
- int sockfd;
- gboolean closefd;
+ GSocket *socket;
+ gboolean close_socket;
gboolean auto_multicast;
gboolean reuse;
/* our sockets */
- GstPollFD sock;
- GstPoll *fdset;
- gboolean externalfd;
- gboolean is_ipv6;
-
- struct sockaddr_storage myaddr;
+ GSocket *used_socket;
+ GCancellable *cancellable;
+ GInetSocketAddress *addr;
+ gboolean external_socket;
- gchar *uristr;
+ gchar *uri;
};
struct _GstUDPSrcClass {
libgstvideobox_la_SOURCES = gstvideobox.c
nodist_libgstvideobox_la_SOURCES = $(ORC_NODIST_SOURCES)
libgstvideobox_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) \
- $(GST_CONTROLLER_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(GST_CFLAGS) \
$(ORC_CFLAGS)
libgstvideobox_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
- $(GST_CONTROLLER_LIBS) \
$(GST_BASE_LIBS) \
$(GST_LIBS) \
$(ORC_LIBS) \
#include <math.h>
#include <string.h>
-#include <gst/controller/gstcontroller.h>
-
-#include "gst/glib-compat-private.h"
-
GST_DEBUG_CATEGORY_STATIC (videobox_debug);
#define GST_CAT_DEFAULT videobox_debug
"Resizes a video by adding borders or cropping",
"Wim Taymans <wim@fluendo.com>");
- gst_element_class_add_static_pad_template (element_class,
- &gst_video_box_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_video_box_src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_video_box_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_video_box_src_template));
}
static void
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (video_box), stream_time);
+ gst_object_sync_values (GST_OBJECT (video_box), stream_time);
}
static GstFlowReturn
static gboolean
plugin_init (GstPlugin * plugin)
{
- gst_controller_init (NULL, NULL);
-
GST_DEBUG_CATEGORY_INIT (videobox_debug, "videobox", 0,
"Resizes a video by adding borders or cropping");
libgstvideocrop_la_SOURCES = gstvideocrop.c gstaspectratiocrop.c
libgstvideocrop_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)
-libgstvideocrop_la_LIBADD = $(GST_BASE_LIBS)
+libgstvideocrop_la_LIBADD = $(GST_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR)
libgstvideocrop_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvideocrop_la_LIBTOOLFLAGS = --tag=disable-static
};
/* we support the same caps as videocrop */
-#define ASPECT_RATIO_CROP_CAPS \
- GST_VIDEO_CAPS_RGBx ";" \
- GST_VIDEO_CAPS_xRGB ";" \
- GST_VIDEO_CAPS_BGRx ";" \
- GST_VIDEO_CAPS_xBGR ";" \
- GST_VIDEO_CAPS_RGBA ";" \
- GST_VIDEO_CAPS_ARGB ";" \
- GST_VIDEO_CAPS_BGRA ";" \
- GST_VIDEO_CAPS_ABGR ";" \
- GST_VIDEO_CAPS_RGB ";" \
- GST_VIDEO_CAPS_BGR ";" \
- GST_VIDEO_CAPS_YUV ("AYUV") ";" \
- GST_VIDEO_CAPS_YUV ("YUY2") ";" \
- GST_VIDEO_CAPS_YUV ("YVYU") ";" \
- GST_VIDEO_CAPS_YUV ("UYVY") ";" \
- GST_VIDEO_CAPS_YUV ("Y800") ";" \
- GST_VIDEO_CAPS_YUV ("I420") ";" \
- GST_VIDEO_CAPS_YUV ("YV12") ";" \
- GST_VIDEO_CAPS_RGB_16 ";" \
- GST_VIDEO_CAPS_RGB_15
+#define ASPECT_RATIO_CROP_CAPS \
+ GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR, " \
+ "RGBA, ARGB, BGRA, ABGR, RGB, BGR, AYUV, YUY2, " \
+ "YVYU, UYVY, Y800, I420, RGB16, RGB15, GRAY8 }")
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_STATIC_CAPS (ASPECT_RATIO_CROP_CAPS)
);
-GST_BOILERPLATE (GstAspectRatioCrop, gst_aspect_ratio_crop, GstBin,
- GST_TYPE_BIN);
+#define gst_aspect_ratio_crop_parent_class parent_class
+G_DEFINE_TYPE (GstAspectRatioCrop, gst_aspect_ratio_crop, GST_TYPE_BIN);
static void gst_aspect_ratio_crop_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GValue * value, GParamSpec * pspec);
static void gst_aspect_ratio_crop_set_cropping (GstAspectRatioCrop *
aspect_ratio_crop, gint top, gint right, gint bottom, gint left);
-static GstCaps *gst_aspect_ratio_crop_get_caps (GstPad * pad);
-static gboolean gst_aspect_ratio_crop_set_caps (GstPad * pad, GstCaps * caps);
+static GstCaps *gst_aspect_ratio_crop_get_caps (GstPad * pad, GstCaps * filter);
+static gboolean gst_aspect_ratio_crop_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+static gboolean gst_aspect_ratio_crop_set_caps (GstAspectRatioCrop *
+ aspect_ratio_crop, GstCaps * caps);
+static gboolean gst_aspect_ratio_crop_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * evt);
static void gst_aspect_ratio_crop_finalize (GObject * object);
static void gst_aspect_ratio_transform_structure (GstAspectRatioCrop *
aspect_ratio_crop, GstStructure * structure, GstStructure ** new_structure,
}
static gboolean
-gst_aspect_ratio_crop_set_caps (GstPad * pad, GstCaps * caps)
+gst_aspect_ratio_crop_set_caps (GstAspectRatioCrop * aspect_ratio_crop,
+ GstCaps * caps)
{
- GstAspectRatioCrop *aspect_ratio_crop;
GstPad *peer_pad;
GstStructure *structure;
gboolean ret;
- aspect_ratio_crop = GST_ASPECT_RATIO_CROP (gst_pad_get_parent (pad));
-
- g_mutex_lock (aspect_ratio_crop->crop_lock);
+ g_mutex_lock (&aspect_ratio_crop->crop_lock);
structure = gst_caps_get_structure (caps, 0);
gst_aspect_ratio_transform_structure (aspect_ratio_crop, structure, NULL,
"sink");
ret = gst_pad_set_caps (peer_pad, caps);
gst_object_unref (peer_pad);
- gst_object_unref (aspect_ratio_crop);
- g_mutex_unlock (aspect_ratio_crop->crop_lock);
+ g_mutex_unlock (&aspect_ratio_crop->crop_lock);
return ret;
}
-static void
-gst_aspect_ratio_crop_base_init (gpointer g_class)
+static gboolean
+gst_aspect_ratio_crop_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * evt)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ gboolean ret;
+ GstAspectRatioCrop *aspect_ratio_crop = GST_ASPECT_RATIO_CROP (parent);
- gst_element_class_set_details_simple (element_class, "aspectratiocrop",
- "Filter/Effect/Video",
- "Crops video into a user-defined aspect-ratio",
- "Thijs Vermeir <thijsvermeir@gmail.com>");
+ ret =
+ aspect_ratio_crop->sinkpad_old_eventfunc (pad, parent,
+ gst_event_ref (evt));
+
+ switch (GST_EVENT_TYPE (evt)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (evt, &caps);
+ ret = gst_aspect_ratio_crop_set_caps (aspect_ratio_crop, caps);
+ break;
+ }
+ default:
+ break;
+ }
+ gst_event_unref (evt);
- gst_element_class_add_static_pad_template (element_class, &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
+ return ret;
}
static void
gst_aspect_ratio_crop_class_init (GstAspectRatioCropClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
gobject_class->set_property = gst_aspect_ratio_crop_set_property;
gobject_class->get_property = gst_aspect_ratio_crop_get_property;
gst_param_spec_fraction ("aspect-ratio", "aspect-ratio",
"Target aspect-ratio of video", 0, 1, G_MAXINT, 1, 0, 1,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_details_simple (element_class, "aspectratiocrop",
+ "Filter/Effect/Video",
+ "Crops video into a user-defined aspect-ratio",
+ "Thijs Vermeir <thijsvermeir@gmail.com>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
}
static void
aspect_ratio_crop = GST_ASPECT_RATIO_CROP (object);
- if (aspect_ratio_crop->crop_lock)
- g_mutex_free (aspect_ratio_crop->crop_lock);
+ g_mutex_clear (&aspect_ratio_crop->crop_lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
-gst_aspect_ratio_crop_init (GstAspectRatioCrop * aspect_ratio_crop,
- GstAspectRatioCropClass * klass)
+gst_aspect_ratio_crop_init (GstAspectRatioCrop * aspect_ratio_crop)
{
GstPad *link_pad;
GstPad *src_pad;
aspect_ratio_crop->ar_num = 0;
aspect_ratio_crop->ar_denom = 1;
- aspect_ratio_crop->crop_lock = g_mutex_new ();
+ g_mutex_init (&aspect_ratio_crop->crop_lock);
/* add the transform element */
aspect_ratio_crop->videocrop = gst_element_factory_make ("videocrop", NULL);
gst_element_get_static_pad (GST_ELEMENT (aspect_ratio_crop->videocrop),
"src");
src_pad = gst_ghost_pad_new ("src", link_pad);
- gst_pad_set_getcaps_function (src_pad,
- GST_DEBUG_FUNCPTR (gst_aspect_ratio_crop_get_caps));
+ gst_pad_set_query_function (src_pad,
+ GST_DEBUG_FUNCPTR (gst_aspect_ratio_crop_src_query));
gst_element_add_pad (GST_ELEMENT (aspect_ratio_crop), src_pad);
gst_object_unref (link_pad);
/* create ghost pad sink */
gst_element_add_pad (GST_ELEMENT (aspect_ratio_crop),
aspect_ratio_crop->sink);
gst_object_unref (link_pad);
- gst_pad_set_setcaps_function (aspect_ratio_crop->sink,
- GST_DEBUG_FUNCPTR (gst_aspect_ratio_crop_set_caps));
+
+ aspect_ratio_crop->sinkpad_old_eventfunc =
+ GST_PAD_EVENTFUNC (aspect_ratio_crop->sink);
+ gst_pad_set_event_function (aspect_ratio_crop->sink,
+ GST_DEBUG_FUNCPTR (gst_aspect_ratio_crop_sink_event));
}
static void
}
static GstCaps *
-gst_aspect_ratio_crop_get_caps (GstPad * pad)
+gst_aspect_ratio_crop_get_caps (GstPad * pad, GstCaps * filter)
{
GstPad *peer;
GstAspectRatioCrop *aspect_ratio_crop;
aspect_ratio_crop = GST_ASPECT_RATIO_CROP (gst_pad_get_parent (pad));
- g_mutex_lock (aspect_ratio_crop->crop_lock);
+ g_mutex_lock (&aspect_ratio_crop->crop_lock);
peer = gst_pad_get_peer (aspect_ratio_crop->sink);
if (peer == NULL) {
return_caps = gst_static_pad_template_get_caps (&src_template);
- gst_caps_ref (return_caps);
} else {
GstCaps *peer_caps;
- peer_caps = gst_pad_get_caps (peer);
+ peer_caps = gst_pad_query_caps (peer, filter);
return_caps =
gst_aspect_ratio_crop_transform_caps (aspect_ratio_crop, peer_caps);
gst_caps_unref (peer_caps);
gst_object_unref (peer);
}
- g_mutex_unlock (aspect_ratio_crop->crop_lock);
+ g_mutex_unlock (&aspect_ratio_crop->crop_lock);
gst_object_unref (aspect_ratio_crop);
+ if (return_caps && filter) {
+ GstCaps *tmp =
+ gst_caps_intersect_full (filter, return_caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_replace (&return_caps, tmp);
+ gst_caps_unref (tmp);
+ }
+
return return_caps;
}
+static gboolean
+gst_aspect_ratio_crop_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_aspect_ratio_crop_get_caps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
+}
+
static void
gst_aspect_ratio_crop_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
aspect_ratio_crop->ar_num = gst_value_get_fraction_numerator (value);
aspect_ratio_crop->ar_denom =
gst_value_get_fraction_denominator (value);
- recheck = (GST_PAD_CAPS (aspect_ratio_crop->sink) != NULL);
+ recheck = gst_pad_has_current_caps (aspect_ratio_crop->sink);
}
break;
default:
GST_OBJECT_UNLOCK (aspect_ratio_crop);
if (recheck) {
- gst_aspect_ratio_crop_set_caps (aspect_ratio_crop->sink,
- GST_PAD_CAPS (aspect_ratio_crop->sink));
+ GstCaps *caps = gst_pad_get_current_caps (aspect_ratio_crop->sink);
+ gst_aspect_ratio_crop_set_caps (aspect_ratio_crop, caps);
+ gst_caps_unref (caps);
}
}
GstElement *videocrop;
GstPad *sink;
+ GstPadEventFunction sinkpad_old_eventfunc;
/* target aspect ratio */
gint ar_num; /* if < 1 then don't change ar */
gint ar_denom;
- GMutex *crop_lock;
+ GMutex crop_lock;
};
struct _GstAspectRatioCropClass
ARG_BOTTOM
};
-/* the formats we support */
-#define GST_VIDEO_CAPS_GRAY "video/x-raw-gray, " \
- "bpp = (int) 8, " \
- "width = " GST_VIDEO_SIZE_RANGE ", " \
- "height = " GST_VIDEO_SIZE_RANGE ", " \
- "framerate = " GST_VIDEO_FPS_RANGE
-
-#define VIDEO_CROP_CAPS \
- GST_VIDEO_CAPS_RGBx ";" \
- GST_VIDEO_CAPS_xRGB ";" \
- GST_VIDEO_CAPS_BGRx ";" \
- GST_VIDEO_CAPS_xBGR ";" \
- GST_VIDEO_CAPS_RGBA ";" \
- GST_VIDEO_CAPS_ARGB ";" \
- GST_VIDEO_CAPS_BGRA ";" \
- GST_VIDEO_CAPS_ABGR ";" \
- GST_VIDEO_CAPS_RGB ";" \
- GST_VIDEO_CAPS_BGR ";" \
- GST_VIDEO_CAPS_YUV ("AYUV") ";" \
- GST_VIDEO_CAPS_YUV ("YUY2") ";" \
- GST_VIDEO_CAPS_YUV ("YVYU") ";" \
- GST_VIDEO_CAPS_YUV ("UYVY") ";" \
- GST_VIDEO_CAPS_YUV ("Y800") ";" \
- GST_VIDEO_CAPS_YUV ("I420") ";" \
- GST_VIDEO_CAPS_YUV ("YV12") ";" \
- GST_VIDEO_CAPS_RGB_16 ";" \
- GST_VIDEO_CAPS_RGB_15 ";" \
- GST_VIDEO_CAPS_GRAY
+#define VIDEO_CROP_CAPS \
+ GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR, " \
+ "RGBA, ARGB, BGRA, ABGR, RGB, BGR, AYUV, YUY2, " \
+ "YVYU, UYVY, Y800, I420, RGB16, RGB15, GRAY8 }")
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_STATIC_CAPS (VIDEO_CROP_CAPS)
);
-GST_BOILERPLATE (GstVideoCrop, gst_video_crop, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
+#define gst_video_crop_parent_class parent_class
+G_DEFINE_TYPE (GstVideoCrop, gst_video_crop, GST_TYPE_BASE_TRANSFORM);
static void gst_video_crop_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GValue * value, GParamSpec * pspec);
static GstCaps *gst_video_crop_transform_caps (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps);
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter_caps);
static GstFlowReturn gst_video_crop_transform (GstBaseTransform * trans,
GstBuffer * inbuf, GstBuffer * outbuf);
static gboolean gst_video_crop_get_unit_size (GstBaseTransform * trans,
- GstCaps * caps, guint * size);
+ GstCaps * caps, gsize * size);
static gboolean gst_video_crop_set_caps (GstBaseTransform * trans,
GstCaps * in_caps, GstCaps * outcaps);
static gboolean gst_video_crop_src_event (GstBaseTransform * trans,
GstEvent * event);
-static void
-gst_video_crop_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Crop",
- "Filter/Effect/Video",
- "Crops video into a user-defined region",
- "Tim-Philipp Müller <tim centricular net>");
-
- gst_element_class_add_static_pad_template (element_class,
- &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
-}
-
static gboolean
gst_video_crop_src_event (GstBaseTransform * trans, GstEvent * event)
{
gst_video_crop_class_init (GstVideoCropClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
GstBaseTransformClass *basetransform_class;
gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
basetransform_class = (GstBaseTransformClass *) klass;
+ gst_element_class_set_details_simple (element_class, "Crop",
+ "Filter/Effect/Video",
+ "Crops video into a user-defined region",
+ "Tim-Philipp Müller <tim centricular net>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_set_details_simple (element_class, "Crop",
+ "Filter/Effect/Video",
+ "Crops video into a user-defined region",
+ "Tim-Philipp Müller <tim centricular net>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+
gobject_class->set_property = gst_video_crop_set_property;
gobject_class->get_property = gst_video_crop_get_property;
}
static void
-gst_video_crop_init (GstVideoCrop * vcrop, GstVideoCropClass * klass)
+gst_video_crop_init (GstVideoCrop * vcrop)
{
vcrop->crop_right = 0;
vcrop->crop_left = 0;
gst_video_crop_get_image_details_from_caps (GstVideoCrop * vcrop,
GstVideoCropImageDetails * details, GstCaps * caps)
{
- GstStructure *structure;
- gint width, height;
-
- structure = gst_caps_get_structure (caps, 0);
- if (!gst_structure_get_int (structure, "width", &width) ||
- !gst_structure_get_int (structure, "height", &height)) {
+ gst_video_info_init (&details->info);
+ if (!gst_video_info_from_caps (&details->info, caps)) {
goto incomplete_format;
}
- details->width = width;
- details->height = height;
-
- if (gst_structure_has_name (structure, "video/x-raw-rgb") ||
- gst_structure_has_name (structure, "video/x-raw-gray")) {
- gint bpp = 0;
-
- if (!gst_structure_get_int (structure, "bpp", &bpp) || (bpp & 0x07) != 0)
- goto incomplete_format;
+ if (details->info.width == 0 && details->info.height == 0) {
+ goto incomplete_format;
+ }
+ if (GST_VIDEO_INFO_IS_RGB (&details->info)
+ || GST_VIDEO_INFO_IS_GRAY (&details->info)) {
details->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_SIMPLE;
- details->bytes_per_pixel = bpp / 8;
- details->stride = GST_ROUND_UP_4 (width * details->bytes_per_pixel);
- details->size = details->stride * height;
- } else if (gst_structure_has_name (structure, "video/x-raw-yuv")) {
- guint32 format = 0;
-
- if (!gst_structure_get_fourcc (structure, "format", &format))
- goto incomplete_format;
-
- switch (format) {
- case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
+ } else {
+ switch (GST_VIDEO_INFO_FORMAT (&details->info)) {
+ case GST_VIDEO_FORMAT_AYUV:
details->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_SIMPLE;
- details->bytes_per_pixel = 4;
- details->stride = GST_ROUND_UP_4 (width * 4);
- details->size = details->stride * height;
break;
- case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
- case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
- case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
+ case GST_VIDEO_FORMAT_YVYU:
+ case GST_VIDEO_FORMAT_YUY2:
+ case GST_VIDEO_FORMAT_UYVY:
details->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_COMPLEX;
- details->bytes_per_pixel = 2;
- details->stride = GST_ROUND_UP_4 (width * 2);
- details->size = details->stride * height;
- if (format == GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')) {
+ if (GST_VIDEO_INFO_FORMAT (&details->info) == GST_VIDEO_FORMAT_UYVY) {
/* UYVY = 4:2:2 - [U0 Y0 V0 Y1] [U2 Y2 V2 Y3] [U4 Y4 V4 Y5] */
details->macro_y_off = 1;
} else {
details->macro_y_off = 0;
}
break;
- case GST_MAKE_FOURCC ('Y', '8', '0', '0'):
+ case GST_VIDEO_FORMAT_Y800:
details->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_SIMPLE;
- details->bytes_per_pixel = 1;
- details->stride = GST_ROUND_UP_4 (width);
- details->size = details->stride * height;
break;
- case GST_MAKE_FOURCC ('I', '4', '2', '0'):
- case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):{
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_YV12:
details->packing = VIDEO_CROP_PIXEL_FORMAT_PLANAR;
-
- details->y_stride = GST_ROUND_UP_4 (width);
- details->u_stride = GST_ROUND_UP_8 (width) / 2;
- details->v_stride = GST_ROUND_UP_8 (width) / 2;
-
- /* I420 and YV12 have U/V planes swapped, but doesn't matter for us */
- details->y_off = 0;
- details->u_off = 0 + details->y_stride * GST_ROUND_UP_2 (height);
- details->v_off = details->u_off +
- details->u_stride * (GST_ROUND_UP_2 (height) / 2);
- details->size = details->v_off +
- details->v_stride * (GST_ROUND_UP_2 (height) / 2);
break;
- }
default:
goto unknown_format;
}
- } else {
- goto unknown_format;
}
return TRUE;
static gboolean
gst_video_crop_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
- guint * size)
+ gsize * size)
{
GstVideoCropImageDetails img_details = { 0, };
GstVideoCrop *vcrop = GST_VIDEO_CROP (trans);
if (!gst_video_crop_get_image_details_from_caps (vcrop, &img_details, caps))
return FALSE;
- *size = img_details.size;
+ *size = GST_VIDEO_INFO_SIZE (&img_details.info);
return TRUE;
}
gst_video_crop_transform_packed_complex (GstVideoCrop * vcrop,
GstBuffer * inbuf, GstBuffer * outbuf)
{
+ GstMapInfo in_map, out_map;
guint8 *in_data, *out_data;
guint i, dx;
+ gint in_stride;
+ gint out_stride;
+
+ gst_buffer_map (inbuf, &in_map, GST_MAP_READ);
+ gst_buffer_map (outbuf, &out_map, GST_MAP_WRITE);
+
+ in_data = in_map.data;
+ out_data = out_map.data;
- in_data = GST_BUFFER_DATA (inbuf);
- out_data = GST_BUFFER_DATA (outbuf);
+ in_stride = GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->in.info, 0);
+ out_stride = GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->out.info, 0);
- in_data += vcrop->crop_top * vcrop->in.stride;
+ in_data += vcrop->crop_top * in_stride;
/* rounding down here so we end up at the start of a macro-pixel and not
* in the middle of one */
- in_data += ROUND_DOWN_2 (vcrop->crop_left) * vcrop->in.bytes_per_pixel;
+ in_data +=
+ ROUND_DOWN_2 (vcrop->crop_left) *
+ GST_VIDEO_INFO_COMP_PSTRIDE (&vcrop->in.info, 0);
- dx = vcrop->out.width * vcrop->out.bytes_per_pixel;
+ dx = GST_VIDEO_INFO_WIDTH (&vcrop->out.info) *
+ GST_VIDEO_INFO_COMP_PSTRIDE (&vcrop->out.info, 0);
/* UYVY = 4:2:2 - [U0 Y0 V0 Y1] [U2 Y2 V2 Y3] [U4 Y4 V4 Y5]
* YUYV = 4:2:2 - [Y0 U0 Y1 V0] [Y2 U2 Y3 V2] [Y4 U4 Y5 V4] = YUY2 */
if ((vcrop->crop_left % 2) != 0) {
- for (i = 0; i < vcrop->out.height; ++i) {
+ for (i = 0; i < GST_VIDEO_INFO_HEIGHT (&vcrop->out.info); ++i) {
gint j;
memcpy (out_data, in_data, dx);
/* move just the Y samples one pixel to the left, don't worry about
* chroma shift */
- for (j = vcrop->in.macro_y_off; j < vcrop->out.stride - 2; j += 2)
+ for (j = vcrop->in.macro_y_off; j < out_stride - 2; j += 2)
out_data[j] = in_data[j + 2];
- in_data += vcrop->in.stride;
- out_data += vcrop->out.stride;
+ in_data += in_stride;
+ out_data += out_stride;
}
} else {
- for (i = 0; i < vcrop->out.height; ++i) {
+ for (i = 0; i < GST_VIDEO_INFO_HEIGHT (&vcrop->out.info); ++i) {
memcpy (out_data, in_data, dx);
- in_data += vcrop->in.stride;
- out_data += vcrop->out.stride;
+ in_data += in_stride;
+ out_data += out_stride;
}
}
+ gst_buffer_unmap (inbuf, &in_map);
+ gst_buffer_unmap (outbuf, &out_map);
}
static void
gst_video_crop_transform_packed_simple (GstVideoCrop * vcrop,
GstBuffer * inbuf, GstBuffer * outbuf)
{
+ GstMapInfo in_map, out_map;
guint8 *in_data, *out_data;
guint i, dx;
+ gint in_stride, out_stride;
+
+ gst_buffer_map (inbuf, &in_map, GST_MAP_READ);
+ gst_buffer_map (outbuf, &out_map, GST_MAP_WRITE);
+
+ in_data = in_map.data;
+ out_data = out_map.data;
- in_data = GST_BUFFER_DATA (inbuf);
- out_data = GST_BUFFER_DATA (outbuf);
+ in_stride = GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->in.info, 0);
+ out_stride = GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->out.info, 0);
- in_data += vcrop->crop_top * vcrop->in.stride;
- in_data += vcrop->crop_left * vcrop->in.bytes_per_pixel;
+ in_data += vcrop->crop_top * in_stride;
+ in_data +=
+ vcrop->crop_left * GST_VIDEO_INFO_COMP_PSTRIDE (&vcrop->in.info, 0);
- dx = vcrop->out.width * vcrop->out.bytes_per_pixel;
+ dx = GST_VIDEO_INFO_WIDTH (&vcrop->out.info) *
+ GST_VIDEO_INFO_COMP_PSTRIDE (&vcrop->out.info, 0);
- for (i = 0; i < vcrop->out.height; ++i) {
+ for (i = 0; i < GST_VIDEO_INFO_HEIGHT (&vcrop->out.info); ++i) {
memcpy (out_data, in_data, dx);
- in_data += vcrop->in.stride;
- out_data += vcrop->out.stride;
+ in_data += in_stride;
+ out_data += out_stride;
}
+ gst_buffer_unmap (inbuf, &in_map);
+ gst_buffer_unmap (outbuf, &out_map);
}
static void
gst_video_crop_transform_planar (GstVideoCrop * vcrop, GstBuffer * inbuf,
GstBuffer * outbuf)
{
+ GstMapInfo in_map, out_map;
guint8 *y_out, *u_out, *v_out;
guint8 *y_in, *u_in, *v_in;
guint i, dx;
+ gst_buffer_map (inbuf, &in_map, GST_MAP_READ);
+ gst_buffer_map (outbuf, &out_map, GST_MAP_WRITE);
+
/* Y plane */
- y_in = GST_BUFFER_DATA (inbuf);
- y_out = GST_BUFFER_DATA (outbuf);
+ y_in = in_map.data;
+ y_out = out_map.data;
- y_in += (vcrop->crop_top * vcrop->in.y_stride) + vcrop->crop_left;
- dx = vcrop->out.width * 1;
+ y_in +=
+ (vcrop->crop_top * GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->in.info,
+ 0)) + vcrop->crop_left;
+ dx = GST_VIDEO_INFO_WIDTH (&vcrop->out.info) * 1;
- for (i = 0; i < vcrop->out.height; ++i) {
+ for (i = 0; i < GST_VIDEO_INFO_HEIGHT (&vcrop->out.info); ++i) {
memcpy (y_out, y_in, dx);
- y_in += vcrop->in.y_stride;
- y_out += vcrop->out.y_stride;
+ y_in += GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->in.info, 0);
+ y_out += GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->out.info, 0);
}
/* U + V planes */
- u_in = GST_BUFFER_DATA (inbuf) + vcrop->in.u_off;
- u_out = GST_BUFFER_DATA (outbuf) + vcrop->out.u_off;
-
- u_in += (vcrop->crop_top / 2) * vcrop->in.u_stride;
+ u_in =
+ (guint8 *) in_map.data + GST_VIDEO_INFO_PLANE_OFFSET (&vcrop->in.info, 1);
+ u_out =
+ (guint8 *) out_map.data + GST_VIDEO_INFO_PLANE_OFFSET (&vcrop->out.info,
+ 1);
+
+ u_in +=
+ (vcrop->crop_top / 2) * GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->in.info, 1);
u_in += vcrop->crop_left / 2;
- v_in = GST_BUFFER_DATA (inbuf) + vcrop->in.v_off;
- v_out = GST_BUFFER_DATA (outbuf) + vcrop->out.v_off;
+ v_in =
+ (guint8 *) in_map.data + GST_VIDEO_INFO_PLANE_OFFSET (&vcrop->in.info, 2);
+ v_out =
+ (guint8 *) out_map.data + GST_VIDEO_INFO_PLANE_OFFSET (&vcrop->out.info,
+ 2);
- v_in += (vcrop->crop_top / 2) * vcrop->in.v_stride;
+ v_in +=
+ (vcrop->crop_top / 2) * GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->in.info, 2);
v_in += vcrop->crop_left / 2;
- dx = GST_ROUND_UP_2 (vcrop->out.width) / 2;
+ dx = GST_ROUND_UP_2 (GST_VIDEO_INFO_WIDTH (&vcrop->out.info)) / 2;
- for (i = 0; i < GST_ROUND_UP_2 (vcrop->out.height) / 2; ++i) {
+ for (i = 0; i < GST_ROUND_UP_2 (GST_VIDEO_INFO_HEIGHT (&vcrop->out.info)) / 2;
+ ++i) {
memcpy (u_out, u_in, dx);
memcpy (v_out, v_in, dx);
- u_in += vcrop->in.u_stride;
- u_out += vcrop->out.u_stride;
- v_in += vcrop->in.v_stride;
- v_out += vcrop->out.v_stride;
+ u_in += GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->in.info, 1);
+ u_out += GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->out.info, 1);
+ v_in += GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->in.info, 2);
+ v_out += GST_VIDEO_INFO_PLANE_STRIDE (&vcrop->out.info, 2);
}
+
+ gst_buffer_unmap (inbuf, &in_map);
+ gst_buffer_unmap (outbuf, &out_map);
}
static GstFlowReturn
return ret;
}
+/* TODO use filter_caps */
static GstCaps *
gst_video_crop_transform_caps (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter_caps)
{
GstVideoCrop *vcrop;
GstCaps *other_caps;
other_caps = NULL;
}
+ if (other_caps && filter_caps) {
+ GstCaps *tmp = gst_caps_intersect_full (filter_caps, other_caps,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_replace (&other_caps, tmp);
+ gst_caps_unref (tmp);
+ }
+
return other_caps;
}
if (!gst_video_crop_get_image_details_from_caps (crop, &crop->out, outcaps))
goto wrong_output;
- if (G_UNLIKELY ((crop->crop_left + crop->crop_right) >= crop->in.width ||
- (crop->crop_top + crop->crop_bottom) >= crop->in.height))
+ if (G_UNLIKELY ((crop->crop_left + crop->crop_right) >=
+ GST_VIDEO_INFO_WIDTH (&crop->in.info)
+ || (crop->crop_top + crop->crop_bottom) >=
+ GST_VIDEO_INFO_HEIGHT (&crop->in.info)))
goto cropping_too_much;
GST_LOG_OBJECT (crop, "incaps = %" GST_PTR_FORMAT ", outcaps = %"
/*< private >*/
VideoCropPixelFormat packing;
- guint width;
- guint height;
- guint size;
+ GstVideoInfo info;
/* for packed RGB and YUV */
- guint stride;
- guint bytes_per_pixel;
guint8 macro_y_off; /* for YUY2, YVYU, UYVY, Y offset within macropixel in bytes */
-
- /* for planar YUV */
- guint y_stride, y_off;
- guint u_stride, u_off;
- guint v_stride, v_off;
};
typedef struct _GstVideoCrop GstVideoCrop;
gstvideoflip.c \
gstvideobalance.c \
gstgamma.c
-libgstvideofilter_la_CFLAGS = $(GST_CFLAGS) $(GST_CONTROLLER_CFLAGS) \
+libgstvideofilter_la_CFLAGS = $(GST_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)
libgstvideofilter_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
-lgstvideo-@GST_MAJORMINOR@ \
- -lgstinterfaces-@GST_MAJORMINOR@ \
- $(GST_CONTROLLER_LIBS) \
$(GST_BASE_LIBS) $(GST_LIBS)
libgstvideofilter_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) $(LIBM)
libgstvideofilter_la_LIBTOOLFLAGS = --tag=disable-static
#include <math.h>
#include <gst/video/video.h>
-#include <gst/controller/gstcontroller.h>
GST_DEBUG_CATEGORY_STATIC (gamma_debug);
#define GST_CAT_DEFAULT gamma_debug
#define DEFAULT_PROP_GAMMA 1
static GstStaticPadTemplate gst_gamma_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("NV12") ";"
- GST_VIDEO_CAPS_YUV ("NV21") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";"
- GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("IYUV") ";" GST_VIDEO_CAPS_YUV ("Y41B")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, ABGR, RGBA, Y444, "
+ "xRGB, RGBx, xBGR, BGRx, RGB, BGR, Y42B, NV12, "
+ "NV21, YUY2, UYVY, YVYU, I420, YV12, IYUV, Y41B }"))
);
static GstStaticPadTemplate gst_gamma_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("NV12") ";"
- GST_VIDEO_CAPS_YUV ("NV21") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";"
- GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("IYUV") ";" GST_VIDEO_CAPS_YUV ("Y41B")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, ABGR, RGBA, Y444, "
+ "xRGB, RGBx, xBGR, BGRx, RGB, BGR, Y42B, NV12, "
+ "NV21, YUY2, UYVY, YVYU, I420, YV12, IYUV, Y41B }"))
);
static void gst_gamma_set_property (GObject * object, guint prop_id,
static void gst_gamma_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static gboolean gst_gamma_set_caps (GstBaseTransform * base, GstCaps * incaps,
- GstCaps * outcaps);
-static GstFlowReturn gst_gamma_transform_ip (GstBaseTransform * transform,
- GstBuffer * buf);
+static gboolean gst_gamma_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info);
+static GstFlowReturn gst_gamma_transform_frame_ip (GstVideoFilter * vfilter,
+ GstVideoFrame * frame);
static void gst_gamma_before_transform (GstBaseTransform * transform,
GstBuffer * buf);
static void gst_gamma_calculate_tables (GstGamma * gamma);
-GST_BOILERPLATE (GstGamma, gst_gamma, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
-
-static void
-gst_gamma_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video gamma correction",
- "Filter/Effect/Video",
- "Adjusts gamma on a video stream",
- "Arwed v. Merkatz <v.merkatz@gmx.net>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_gamma_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_gamma_src_template);
-}
+G_DEFINE_TYPE (GstGamma, gst_gamma, GST_TYPE_VIDEO_FILTER);
static void
gst_gamma_class_init (GstGammaClass * g_class)
{
GObjectClass *gobject_class = (GObjectClass *) g_class;
+ GstElementClass *gstelement_class = (GstElementClass *) g_class;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) g_class;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) g_class;
GST_DEBUG_CATEGORY_INIT (gamma_debug, "gamma", 0, "gamma");
0.01, 10, DEFAULT_PROP_GAMMA,
GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS | G_PARAM_READWRITE));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_gamma_set_caps);
- trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_gamma_transform_ip);
+ gst_element_class_set_details_simple (gstelement_class,
+ "Video gamma correction", "Filter/Effect/Video",
+ "Adjusts gamma on a video stream", "Arwed v. Merkatz <v.merkatz@gmx.net");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_gamma_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_gamma_src_template));
+
trans_class->before_transform =
GST_DEBUG_FUNCPTR (gst_gamma_before_transform);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_gamma_set_info);
+ vfilter_class->transform_frame_ip =
+ GST_DEBUG_FUNCPTR (gst_gamma_transform_frame_ip);
}
static void
-gst_gamma_init (GstGamma * gamma, GstGammaClass * g_class)
+gst_gamma_init (GstGamma * gamma)
{
/* properties */
gamma->gamma = DEFAULT_PROP_GAMMA;
}
static void
-gst_gamma_planar_yuv_ip (GstGamma * gamma, guint8 * data)
+gst_gamma_planar_yuv_ip (GstGamma * gamma, GstVideoFrame * frame)
{
gint i, j, height;
- gint width, row_stride, row_wrap;
+ gint width, stride, row_wrap;
const guint8 *table = gamma->gamma_table;
+ guint8 *data;
- data =
- data + gst_video_format_get_component_offset (gamma->format, 0,
- gamma->width, gamma->height);
-
- width = gst_video_format_get_component_width (gamma->format, 0, gamma->width);
- height = gst_video_format_get_component_height (gamma->format, 0,
- gamma->height);
- row_stride = gst_video_format_get_row_stride (gamma->format, 0, gamma->width);
- row_wrap = row_stride - width;
+ data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
+ row_wrap = stride - width;
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
}
static void
-gst_gamma_packed_yuv_ip (GstGamma * gamma, guint8 * data)
+gst_gamma_packed_yuv_ip (GstGamma * gamma, GstVideoFrame * frame)
{
gint i, j, height;
- gint width, row_stride, row_wrap;
+ gint width, stride, row_wrap;
gint pixel_stride;
const guint8 *table = gamma->gamma_table;
+ guint8 *data;
- data = data + gst_video_format_get_component_offset (gamma->format, 0,
- gamma->width, gamma->height);
-
- width = gst_video_format_get_component_width (gamma->format, 0, gamma->width);
- height = gst_video_format_get_component_height (gamma->format, 0,
- gamma->height);
- row_stride = gst_video_format_get_row_stride (gamma->format, 0, gamma->width);
- pixel_stride = gst_video_format_get_pixel_stride (gamma->format, 0);
- row_wrap = row_stride - pixel_stride * width;
+ data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ row_wrap = stride - pixel_stride * width;
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
static void
-gst_gamma_packed_rgb_ip (GstGamma * gamma, guint8 * data)
+gst_gamma_packed_rgb_ip (GstGamma * gamma, GstVideoFrame * frame)
{
gint i, j, height;
- gint width, row_stride, row_wrap;
+ gint width, stride, row_wrap;
gint pixel_stride;
const guint8 *table = gamma->gamma_table;
gint offsets[3];
gint r, g, b;
gint y, u, v;
+ guint8 *data;
- offsets[0] = gst_video_format_get_component_offset (gamma->format, 0,
- gamma->width, gamma->height);
- offsets[1] = gst_video_format_get_component_offset (gamma->format, 1,
- gamma->width, gamma->height);
- offsets[2] = gst_video_format_get_component_offset (gamma->format, 2,
- gamma->width, gamma->height);
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
- width = gst_video_format_get_component_width (gamma->format, 0, gamma->width);
- height = gst_video_format_get_component_height (gamma->format, 0,
- gamma->height);
- row_stride = gst_video_format_get_row_stride (gamma->format, 0, gamma->width);
- pixel_stride = gst_video_format_get_pixel_stride (gamma->format, 0);
- row_wrap = row_stride - pixel_stride * width;
+ offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
+ offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
+ offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
+
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ row_wrap = stride - pixel_stride * width;
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
}
static gboolean
-gst_gamma_set_caps (GstBaseTransform * base, GstCaps * incaps,
- GstCaps * outcaps)
+gst_gamma_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstGamma *gamma = GST_GAMMA (base);
+ GstGamma *gamma = GST_GAMMA (vfilter);
GST_DEBUG_OBJECT (gamma,
"setting caps: in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps,
outcaps);
- if (!gst_video_format_parse_caps (incaps, &gamma->format, &gamma->width,
- &gamma->height))
- goto invalid_caps;
-
- gamma->size =
- gst_video_format_get_size (gamma->format, gamma->width, gamma->height);
-
- switch (gamma->format) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y41B:
goto invalid_caps;
break;
}
-
return TRUE;
+ /* ERRORS */
invalid_caps:
- GST_ERROR_OBJECT (gamma, "Invalid caps: %" GST_PTR_FORMAT, incaps);
- return FALSE;
+ {
+ GST_ERROR_OBJECT (gamma, "Invalid caps: %" GST_PTR_FORMAT, incaps);
+ return FALSE;
+ }
}
static void
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (gamma), stream_time);
+ gst_object_sync_values (GST_OBJECT (gamma), stream_time);
}
static GstFlowReturn
-gst_gamma_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
+gst_gamma_transform_frame_ip (GstVideoFilter * vfilter, GstVideoFrame * frame)
{
- GstGamma *gamma = GST_GAMMA (base);
- guint8 *data;
- guint size;
+ GstGamma *gamma = GST_GAMMA (vfilter);
if (!gamma->process)
goto not_negotiated;
- if (base->passthrough)
+ if (GST_BASE_TRANSFORM (vfilter)->passthrough)
goto done;
- data = GST_BUFFER_DATA (outbuf);
- size = GST_BUFFER_SIZE (outbuf);
-
- if (size != gamma->size)
- goto wrong_size;
-
GST_OBJECT_LOCK (gamma);
- gamma->process (gamma, data);
+ gamma->process (gamma, frame);
GST_OBJECT_UNLOCK (gamma);
done:
return GST_FLOW_OK;
/* ERRORS */
-wrong_size:
- {
- GST_ELEMENT_ERROR (gamma, STREAM, FORMAT,
- (NULL), ("Invalid buffer size %d, expected %d", size, gamma->size));
- return GST_FLOW_ERROR;
- }
not_negotiated:
{
GST_ERROR_OBJECT (gamma, "Not negotiated yet");
GstVideoFilter videofilter;
/* < private > */
-
- /* format */
- GstVideoFormat format;
- gint width;
- gint height;
- gint size;
-
/* properties */
gdouble gamma;
/* tables */
guint8 gamma_table[256];
- void (*process) (GstGamma *gamma, guint8 *data);
+ void (*process) (GstGamma *gamma, GstVideoFrame *frame);
};
struct _GstGammaClass
#include "gstvideobalance.h"
#include <string.h>
-#include <gst/controller/gstcontroller.h>
-#include <gst/interfaces/colorbalance.h>
+#include <gst/video/colorbalance.h>
GST_DEBUG_CATEGORY_STATIC (videobalance_debug);
#define GST_CAT_DEFAULT videobalance_debug
};
static GstStaticPadTemplate gst_video_balance_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";"
- GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("IYUV") ";" GST_VIDEO_CAPS_YUV ("Y41B")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, "
+ "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, "
+ "I420, YV12, IYUV, Y41B }"))
);
static GstStaticPadTemplate gst_video_balance_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";"
- GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("IYUV") ";" GST_VIDEO_CAPS_YUV ("Y41B")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, "
+ "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, "
+ "I420, YV12, IYUV, Y41B }"))
);
-static void gst_video_balance_colorbalance_init (GstColorBalanceClass * iface);
-static void gst_video_balance_interface_init (GstImplementsInterfaceClass *
- klass);
+static void gst_video_balance_colorbalance_init (GstColorBalanceInterface *
+ iface);
static void gst_video_balance_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_video_balance_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static void
-_do_init (GType video_balance_type)
-{
- static const GInterfaceInfo iface_info = {
- (GInterfaceInitFunc) gst_video_balance_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo colorbalance_info = {
- (GInterfaceInitFunc) gst_video_balance_colorbalance_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (video_balance_type,
- GST_TYPE_IMPLEMENTS_INTERFACE, &iface_info);
- g_type_add_interface_static (video_balance_type, GST_TYPE_COLOR_BALANCE,
- &colorbalance_info);
-}
-
-GST_BOILERPLATE_FULL (GstVideoBalance, gst_video_balance, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER, _do_init);
+#define gst_video_balance_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstVideoBalance, gst_video_balance,
+ GST_TYPE_VIDEO_FILTER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
+ gst_video_balance_colorbalance_init));
/*
* look-up tables (LUT).
}
static void
-gst_video_balance_planar_yuv (GstVideoBalance * videobalance, guint8 * data)
+gst_video_balance_planar_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
{
gint x, y;
guint8 *ydata;
guint8 *udata, *vdata;
gint ystride, ustride, vstride;
- GstVideoFormat format;
gint width, height;
gint width2, height2;
guint8 *tabley = videobalance->tabley;
guint8 **tableu = videobalance->tableu;
guint8 **tablev = videobalance->tablev;
- format = videobalance->format;
- width = videobalance->width;
- height = videobalance->height;
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
- ydata =
- data + gst_video_format_get_component_offset (format, 0, width, height);
- ystride = gst_video_format_get_row_stride (format, 0, width);
+ ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
for (y = 0; y < height; y++) {
guint8 *yptr;
}
}
- width2 = gst_video_format_get_component_width (format, 1, width);
- height2 = gst_video_format_get_component_height (format, 1, height);
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
- udata =
- data + gst_video_format_get_component_offset (format, 1, width, height);
- vdata =
- data + gst_video_format_get_component_offset (format, 2, width, height);
- ustride = gst_video_format_get_row_stride (format, 1, width);
- vstride = gst_video_format_get_row_stride (format, 1, width);
+ udata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
+ vdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 2);
+ ustride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
+ vstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 2);
for (y = 0; y < height2; y++) {
guint8 *uptr, *vptr;
}
static void
-gst_video_balance_packed_yuv (GstVideoBalance * videobalance, guint8 * data)
+gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
{
- gint x, y;
- guint8 *ydata;
- guint8 *udata, *vdata;
- gint ystride, ustride, vstride;
+ gint x, y, stride;
+ guint8 *ydata, *udata, *vdata;
gint yoff, uoff, voff;
- GstVideoFormat format;
gint width, height;
gint width2, height2;
guint8 *tabley = videobalance->tabley;
guint8 **tableu = videobalance->tableu;
guint8 **tablev = videobalance->tablev;
- format = videobalance->format;
- width = videobalance->width;
- height = videobalance->height;
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
- ydata =
- data + gst_video_format_get_component_offset (format, 0, width, height);
- ystride = gst_video_format_get_row_stride (format, 0, width);
- yoff = gst_video_format_get_pixel_stride (format, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+ ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
for (y = 0; y < height; y++) {
guint8 *yptr;
- yptr = ydata + y * ystride;
+ yptr = ydata + y * stride;
for (x = 0; x < width; x++) {
*yptr = tabley[*yptr];
yptr += yoff;
}
}
- width2 = gst_video_format_get_component_width (format, 1, width);
- height2 = gst_video_format_get_component_height (format, 1, height);
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
- udata =
- data + gst_video_format_get_component_offset (format, 1, width, height);
- vdata =
- data + gst_video_format_get_component_offset (format, 2, width, height);
- ustride = gst_video_format_get_row_stride (format, 1, width);
- vstride = gst_video_format_get_row_stride (format, 1, width);
- uoff = gst_video_format_get_pixel_stride (format, 1);
- voff = gst_video_format_get_pixel_stride (format, 2);
+ udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
+ vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
+ uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
+ voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);
for (y = 0; y < height2; y++) {
guint8 *uptr, *vptr;
guint8 u1, v1;
- uptr = udata + y * ustride;
- vptr = vdata + y * vstride;
+ uptr = udata + y * stride;
+ vptr = vdata + y * stride;
for (x = 0; x < width2; x++) {
u1 = *uptr;
#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
static void
-gst_video_balance_packed_rgb (GstVideoBalance * videobalance, guint8 * data)
+gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
{
gint i, j, height;
- gint width, row_stride, row_wrap;
+ gint width, stride, row_wrap;
gint pixel_stride;
+ guint8 *data;
gint offsets[3];
gint r, g, b;
gint y, u, v;
guint8 **tableu = videobalance->tableu;
guint8 **tablev = videobalance->tablev;
- offsets[0] = gst_video_format_get_component_offset (videobalance->format, 0,
- videobalance->width, videobalance->height);
- offsets[1] = gst_video_format_get_component_offset (videobalance->format, 1,
- videobalance->width, videobalance->height);
- offsets[2] = gst_video_format_get_component_offset (videobalance->format, 2,
- videobalance->width, videobalance->height);
-
- width =
- gst_video_format_get_component_width (videobalance->format, 0,
- videobalance->width);
- height =
- gst_video_format_get_component_height (videobalance->format, 0,
- videobalance->height);
- row_stride =
- gst_video_format_get_row_stride (videobalance->format, 0,
- videobalance->width);
- pixel_stride = gst_video_format_get_pixel_stride (videobalance->format, 0);
- row_wrap = row_stride - pixel_stride * width;
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
+ offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
+ offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ row_wrap = stride - pixel_stride * width;
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
/* get notified of caps and plug in the correct process function */
static gboolean
-gst_video_balance_set_caps (GstBaseTransform * base, GstCaps * incaps,
- GstCaps * outcaps)
+gst_video_balance_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);
+ GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
GST_DEBUG_OBJECT (videobalance,
"in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);
videobalance->process = NULL;
- if (!gst_video_format_parse_caps (incaps, &videobalance->format,
- &videobalance->width, &videobalance->height))
- goto invalid_caps;
-
- videobalance->size =
- gst_video_format_get_size (videobalance->format, videobalance->width,
- videobalance->height);
-
- switch (videobalance->format) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y41B:
videobalance->process = gst_video_balance_packed_rgb;
break;
default:
+ goto unknown_format;
break;
}
- return videobalance->process != NULL;
+ return TRUE;
-invalid_caps:
- GST_ERROR_OBJECT (videobalance, "Invalid caps: %" GST_PTR_FORMAT, incaps);
- return FALSE;
+ /* ERRORS */
+unknown_format:
+ {
+ GST_ERROR_OBJECT (videobalance, "unknown format %" GST_PTR_FORMAT, incaps);
+ return FALSE;
+ }
}
static void
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (balance), stream_time);
+ gst_object_sync_values (GST_OBJECT (balance), stream_time);
}
static GstFlowReturn
-gst_video_balance_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
+gst_video_balance_transform_frame_ip (GstVideoFilter * vfilter,
+ GstVideoFrame * frame)
{
- GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);
- guint8 *data;
- guint size;
+ GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
if (!videobalance->process)
goto not_negotiated;
/* if no change is needed, we are done */
- if (base->passthrough)
+ if (GST_BASE_TRANSFORM (vfilter)->passthrough)
goto done;
- data = GST_BUFFER_DATA (outbuf);
- size = GST_BUFFER_SIZE (outbuf);
-
- if (size != videobalance->size)
- goto wrong_size;
-
GST_OBJECT_LOCK (videobalance);
- videobalance->process (videobalance, data);
+ videobalance->process (videobalance, frame);
GST_OBJECT_UNLOCK (videobalance);
done:
return GST_FLOW_OK;
/* ERRORS */
-wrong_size:
+not_negotiated:
{
- GST_ELEMENT_ERROR (videobalance, STREAM, FORMAT,
- (NULL), ("Invalid buffer size %d, expected %d", size,
- videobalance->size));
- return GST_FLOW_ERROR;
+ GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
+ return GST_FLOW_NOT_NEGOTIATED;
}
-not_negotiated:
- GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
- return GST_FLOW_NOT_NEGOTIATED;
-}
-
-static void
-gst_video_balance_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video balance",
- "Filter/Effect/Video",
- "Adjusts brightness, contrast, hue, saturation on a video stream",
- "David Schleef <ds@schleef.org>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_video_balance_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_video_balance_src_template);
}
static void
gst_video_balance_class_init (GstVideoBalanceClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0,
"videobalance");
DEFAULT_PROP_SATURATION,
GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_balance_set_caps);
- trans_class->transform_ip =
- GST_DEBUG_FUNCPTR (gst_video_balance_transform_ip);
+ gst_element_class_set_details_simple (gstelement_class, "Video balance",
+ "Filter/Effect/Video",
+ "Adjusts brightness, contrast, hue, saturation on a video stream",
+ "David Schleef <ds@schleef.org>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_balance_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_balance_src_template));
+
trans_class->before_transform =
GST_DEBUG_FUNCPTR (gst_video_balance_before_transform);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_balance_set_info);
+ vfilter_class->transform_frame_ip =
+ GST_DEBUG_FUNCPTR (gst_video_balance_transform_frame_ip);
}
static void
-gst_video_balance_init (GstVideoBalance * videobalance,
- GstVideoBalanceClass * klass)
+gst_video_balance_init (GstVideoBalance * videobalance)
{
const gchar *channels[4] = { "HUE", "SATURATION",
"BRIGHTNESS", "CONTRAST"
}
}
-static gboolean
-gst_video_balance_interface_supported (GstImplementsInterface * iface,
- GType type)
-{
- g_assert (type == GST_TYPE_COLOR_BALANCE);
- return TRUE;
-}
-
-static void
-gst_video_balance_interface_init (GstImplementsInterfaceClass * klass)
-{
- klass->supported = gst_video_balance_interface_supported;
-}
-
static const GList *
gst_video_balance_colorbalance_list_channels (GstColorBalance * balance)
{
}
static void
-gst_video_balance_colorbalance_init (GstColorBalanceClass * iface)
+gst_video_balance_colorbalance_init (GstColorBalanceInterface * iface)
{
GST_COLOR_BALANCE_TYPE (iface) = GST_COLOR_BALANCE_SOFTWARE;
iface->list_channels = gst_video_balance_colorbalance_list_channels;
gdouble hue;
gdouble saturation;
- /* format */
- GstVideoFormat format;
- gint width;
- gint height;
- gint size;
-
/* tables */
guint8 tabley[256];
guint8 *tableu[256];
guint8 *tablev[256];
- void (*process) (GstVideoBalance *balance, guint8 *data);
+ void (*process) (GstVideoBalance *balance, GstVideoFrame *frame);
};
struct _GstVideoBalanceClass {
#include <string.h>
#include <gst/gst.h>
-#include <gst/controller/gstcontroller.h>
#include <gst/video/video.h>
/* GstVideoFlip properties */
#define GST_CAT_DEFAULT video_flip_debug
static GstStaticPadTemplate gst_video_flip_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";" GST_VIDEO_CAPS_YUV ("IYUV") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";" GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU")
-
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx,xBGR, BGRx, "
+ "RGB, BGR, I420, YV12, IYUV, YUY2, UYVY, YVYU }"))
);
static GstStaticPadTemplate gst_video_flip_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";" GST_VIDEO_CAPS_YUV ("IYUV") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";" GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx,xBGR, BGRx, "
+ "RGB, BGR, I420, YV12, IYUV, YUY2, UYVY, YVYU }"))
);
#define GST_TYPE_VIDEO_FLIP_METHOD (gst_video_flip_method_get_type())
return video_flip_method_type;
}
-GST_BOILERPLATE (GstVideoFlip, gst_video_flip, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+#define gst_video_flip_parent_class parent_class
+G_DEFINE_TYPE (GstVideoFlip, gst_video_flip, GST_TYPE_VIDEO_FILTER);
static GstCaps *
gst_video_flip_transform_caps (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans);
GstCaps *ret;
GST_DEBUG_OBJECT (videoflip, "transformed %" GST_PTR_FORMAT " to %"
GST_PTR_FORMAT, caps, ret);
- return ret;
-}
-
-static gboolean
-gst_video_flip_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
- guint * size)
-{
- GstVideoFormat format;
- gint width, height;
+ if (filter) {
+ GstCaps *intersection;
- if (!gst_video_format_parse_caps (caps, &format, &width, &height))
- return FALSE;
-
- *size = gst_video_format_get_size (format, width, height);
-
- GST_DEBUG_OBJECT (btrans, "our frame size is %d bytes (%dx%d)", *size,
- width, height);
+ GST_DEBUG_OBJECT (videoflip, "Using filter caps %" GST_PTR_FORMAT, filter);
+ intersection =
+ gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (ret);
+ ret = intersection;
+ GST_DEBUG_OBJECT (videoflip, "Intersection %" GST_PTR_FORMAT, ret);
+ }
- return TRUE;
+ return ret;
}
static void
-gst_video_flip_planar_yuv (GstVideoFlip * videoflip, guint8 * dest,
- const guint8 * src)
+gst_video_flip_planar_yuv (GstVideoFlip * videoflip, GstVideoFrame * dest,
+ const GstVideoFrame * src)
{
gint x, y;
guint8 const *s;
guint8 *d;
- GstVideoFormat format = videoflip->format;
- gint sw = videoflip->from_width;
- gint sh = videoflip->from_height;
- gint dw = videoflip->to_width;
- gint dh = videoflip->to_height;
gint src_y_stride, src_u_stride, src_v_stride;
- gint src_y_offset, src_u_offset, src_v_offset;
gint src_y_height, src_u_height, src_v_height;
gint src_y_width, src_u_width, src_v_width;
gint dest_y_stride, dest_u_stride, dest_v_stride;
- gint dest_y_offset, dest_u_offset, dest_v_offset;
gint dest_y_height, dest_u_height, dest_v_height;
gint dest_y_width, dest_u_width, dest_v_width;
- src_y_stride = gst_video_format_get_row_stride (format, 0, sw);
- src_u_stride = gst_video_format_get_row_stride (format, 1, sw);
- src_v_stride = gst_video_format_get_row_stride (format, 2, sw);
+ src_y_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
+ src_u_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 1);
+ src_v_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 2);
- dest_y_stride = gst_video_format_get_row_stride (format, 0, dw);
- dest_u_stride = gst_video_format_get_row_stride (format, 1, dw);
- dest_v_stride = gst_video_format_get_row_stride (format, 2, dw);
+ dest_y_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 0);
+ dest_u_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 1);
+ dest_v_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 2);
- src_y_offset = gst_video_format_get_component_offset (format, 0, sw, sh);
- src_u_offset = gst_video_format_get_component_offset (format, 1, sw, sh);
- src_v_offset = gst_video_format_get_component_offset (format, 2, sw, sh);
+ src_y_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 0);
+ src_u_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 1);
+ src_v_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 2);
- dest_y_offset = gst_video_format_get_component_offset (format, 0, dw, dh);
- dest_u_offset = gst_video_format_get_component_offset (format, 1, dw, dh);
- dest_v_offset = gst_video_format_get_component_offset (format, 2, dw, dh);
+ dest_y_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 0);
+ dest_u_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 1);
+ dest_v_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 2);
- src_y_width = gst_video_format_get_component_width (format, 0, sw);
- src_u_width = gst_video_format_get_component_width (format, 1, sw);
- src_v_width = gst_video_format_get_component_width (format, 2, sw);
+ src_y_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 0);
+ src_u_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 1);
+ src_v_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 2);
- dest_y_width = gst_video_format_get_component_width (format, 0, dw);
- dest_u_width = gst_video_format_get_component_width (format, 1, dw);
- dest_v_width = gst_video_format_get_component_width (format, 2, dw);
-
- src_y_height = gst_video_format_get_component_height (format, 0, sh);
- src_u_height = gst_video_format_get_component_height (format, 1, sh);
- src_v_height = gst_video_format_get_component_height (format, 2, sh);
-
- dest_y_height = gst_video_format_get_component_height (format, 0, dh);
- dest_u_height = gst_video_format_get_component_height (format, 1, dh);
- dest_v_height = gst_video_format_get_component_height (format, 2, dh);
+ dest_y_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 0);
+ dest_u_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 1);
+ dest_v_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 2);
switch (videoflip->method) {
case GST_VIDEO_FLIP_METHOD_90R:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
break;
case GST_VIDEO_FLIP_METHOD_90L:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
break;
case GST_VIDEO_FLIP_METHOD_180:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
break;
case GST_VIDEO_FLIP_METHOD_HORIZ:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
break;
case GST_VIDEO_FLIP_METHOD_VERT:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
break;
case GST_VIDEO_FLIP_METHOD_TRANS:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] = s[x * src_y_stride + y];
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] = s[x * src_u_stride + y];
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_v_stride + x] = s[x * src_v_stride + y];
break;
case GST_VIDEO_FLIP_METHOD_OTHER:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
}
static void
-gst_video_flip_packed_simple (GstVideoFlip * videoflip, guint8 * dest,
- const guint8 * src)
+gst_video_flip_packed_simple (GstVideoFlip * videoflip, GstVideoFrame * dest,
+ const GstVideoFrame * src)
{
gint x, y, z;
- guint8 const *s = src;
- guint8 *d = dest;
- GstVideoFormat format = videoflip->format;
- gint sw = videoflip->from_width;
- gint sh = videoflip->from_height;
- gint dw = videoflip->to_width;
- gint dh = videoflip->to_height;
+ guint8 const *s;
+ guint8 *d;
+ gint sw = GST_VIDEO_FRAME_WIDTH (src);
+ gint sh = GST_VIDEO_FRAME_HEIGHT (src);
+ gint dw = GST_VIDEO_FRAME_WIDTH (dest);
+ gint dh = GST_VIDEO_FRAME_HEIGHT (dest);
gint src_stride, dest_stride;
gint bpp;
- src_stride = gst_video_format_get_row_stride (format, 0, sw);
- dest_stride = gst_video_format_get_row_stride (format, 0, dw);
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+
+ src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
+ dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 0);
/* This is only true for non-subsampled formats! */
- bpp = gst_video_format_get_pixel_stride (format, 0);
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (src, 0);
switch (videoflip->method) {
case GST_VIDEO_FLIP_METHOD_90R:
static void
-gst_video_flip_y422 (GstVideoFlip * videoflip, guint8 * dest,
- const guint8 * src)
+gst_video_flip_y422 (GstVideoFlip * videoflip, GstVideoFrame * dest,
+ const GstVideoFrame * src)
{
gint x, y;
- guint8 const *s = src;
- guint8 *d = dest;
- GstVideoFormat format = videoflip->format;
- gint sw = videoflip->from_width;
- gint sh = videoflip->from_height;
- gint dw = videoflip->to_width;
- gint dh = videoflip->to_height;
+ guint8 const *s;
+ guint8 *d;
+ gint sw = GST_VIDEO_FRAME_WIDTH (src);
+ gint sh = GST_VIDEO_FRAME_HEIGHT (src);
+ gint dw = GST_VIDEO_FRAME_WIDTH (dest);
+ gint dh = GST_VIDEO_FRAME_HEIGHT (dest);
gint src_stride, dest_stride;
gint bpp;
gint y_offset;
gint v_offset;
gint y_stride;
- src_stride = gst_video_format_get_row_stride (format, 0, sw);
- dest_stride = gst_video_format_get_row_stride (format, 0, dw);
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
- y_offset = gst_video_format_get_component_offset (format, 0, sw, sh);
- u_offset = gst_video_format_get_component_offset (format, 1, sw, sh);
- v_offset = gst_video_format_get_component_offset (format, 2, sw, sh);
- y_stride = gst_video_format_get_pixel_stride (format, 0);
+ src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
+ dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 0);
+
+ y_offset = GST_VIDEO_FRAME_COMP_OFFSET (src, 0);
+ u_offset = GST_VIDEO_FRAME_COMP_OFFSET (src, 1);
+ v_offset = GST_VIDEO_FRAME_COMP_OFFSET (src, 2);
+ y_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (src, 0);
bpp = y_stride;
switch (videoflip->method) {
static gboolean
-gst_video_flip_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
- GstCaps * outcaps)
+gst_video_flip_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
- GstVideoFlip *vf = GST_VIDEO_FLIP (btrans);
- GstVideoFormat in_format, out_format;
+ GstVideoFlip *vf = GST_VIDEO_FLIP (vfilter);
gboolean ret = FALSE;
vf->process = NULL;
- if (!gst_video_format_parse_caps (incaps, &in_format, &vf->from_width,
- &vf->from_height)
- || !gst_video_format_parse_caps (outcaps, &out_format, &vf->to_width,
- &vf->to_height))
+ if (GST_VIDEO_INFO_FORMAT (in_info) != GST_VIDEO_INFO_FORMAT (out_info))
goto invalid_caps;
- if (in_format != out_format)
- goto invalid_caps;
- vf->format = in_format;
-
/* Check that they are correct */
switch (vf->method) {
case GST_VIDEO_FLIP_METHOD_90R:
case GST_VIDEO_FLIP_METHOD_90L:
case GST_VIDEO_FLIP_METHOD_TRANS:
case GST_VIDEO_FLIP_METHOD_OTHER:
- if ((vf->from_width != vf->to_height) ||
- (vf->from_height != vf->to_width)) {
+ if ((in_info->width != out_info->height) ||
+ (in_info->height != out_info->width)) {
GST_ERROR_OBJECT (vf, "we are inverting width and height but caps "
- "are not correct : %dx%d to %dx%d", vf->from_width,
- vf->from_height, vf->to_width, vf->to_height);
+ "are not correct : %dx%d to %dx%d", in_info->width,
+ in_info->height, out_info->width, out_info->height);
goto beach;
}
break;
case GST_VIDEO_FLIP_METHOD_180:
case GST_VIDEO_FLIP_METHOD_HORIZ:
case GST_VIDEO_FLIP_METHOD_VERT:
- if ((vf->from_width != vf->to_width) ||
- (vf->from_height != vf->to_height)) {
+ if ((in_info->width != out_info->width) ||
+ (in_info->height != out_info->height)) {
GST_ERROR_OBJECT (vf, "we are keeping width and height but caps "
- "are not correct : %dx%d to %dx%d", vf->from_width,
- vf->from_height, vf->to_width, vf->to_height);
+ "are not correct : %dx%d to %dx%d", in_info->width,
+ in_info->height, out_info->width, out_info->height);
goto beach;
}
break;
ret = TRUE;
- switch (vf->format) {
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y444:
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (videoflip), stream_time);
+ gst_object_sync_values (GST_OBJECT (videoflip), stream_time);
}
static GstFlowReturn
-gst_video_flip_transform (GstBaseTransform * trans, GstBuffer * in,
- GstBuffer * out)
+gst_video_flip_transform_frame (GstVideoFilter * vfilter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
- GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans);
- guint8 *dest;
- const guint8 *src;
+ GstVideoFlip *videoflip = GST_VIDEO_FLIP (vfilter);
if (G_UNLIKELY (videoflip->process == NULL))
goto not_negotiated;
- src = GST_BUFFER_DATA (in);
- dest = GST_BUFFER_DATA (out);
-
- GST_LOG_OBJECT (videoflip, "videoflip: flipping %dx%d to %dx%d (%s)",
- videoflip->from_width, videoflip->from_height, videoflip->to_width,
- videoflip->to_height, video_flip_methods[videoflip->method].value_nick);
+ GST_LOG_OBJECT (videoflip, "videoflip: flipping (%s)",
+ video_flip_methods[videoflip->method].value_nick);
GST_OBJECT_LOCK (videoflip);
- videoflip->process (videoflip, dest, src);
+ videoflip->process (videoflip, out_frame, in_frame);
GST_OBJECT_UNLOCK (videoflip);
return GST_FLOW_OK;
not_negotiated:
- GST_ERROR_OBJECT (videoflip, "Not negotiated yet");
- return GST_FLOW_NOT_NEGOTIATED;
+ {
+ GST_ERROR_OBJECT (videoflip, "Not negotiated yet");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
}
static gboolean
gdouble new_x, new_y, x, y;
GstStructure *structure;
gboolean ret;
+ GstVideoInfo *out_info = &GST_VIDEO_FILTER (trans)->out_info;
GST_DEBUG_OBJECT (vf, "handling %s event", GST_EVENT_TYPE_NAME (event));
switch (vf->method) {
case GST_VIDEO_FLIP_METHOD_90R:
new_x = y;
- new_y = vf->to_width - x;
+ new_y = out_info->width - x;
break;
case GST_VIDEO_FLIP_METHOD_90L:
- new_x = vf->to_height - y;
+ new_x = out_info->height - y;
new_y = x;
break;
case GST_VIDEO_FLIP_METHOD_OTHER:
- new_x = vf->to_height - y;
- new_y = vf->to_width - x;
+ new_x = out_info->height - y;
+ new_y = out_info->width - x;
break;
case GST_VIDEO_FLIP_METHOD_TRANS:
new_x = y;
new_y = x;
break;
case GST_VIDEO_FLIP_METHOD_180:
- new_x = vf->to_width - x;
- new_y = vf->to_height - y;
+ new_x = out_info->width - x;
+ new_y = out_info->height - y;
break;
case GST_VIDEO_FLIP_METHOD_HORIZ:
- new_x = vf->to_width - x;
+ new_x = out_info->width - x;
new_y = y;
break;
case GST_VIDEO_FLIP_METHOD_VERT:
new_x = x;
- new_y = vf->to_height - y;
+ new_y = out_info->height - y;
break;
default:
new_x = x;
}
static void
-gst_video_flip_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video flipper",
- "Filter/Effect/Video",
- "Flips and rotates video", "David Schleef <ds@schleef.org>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_video_flip_sink_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_video_flip_src_template);
-}
-
-static void
gst_video_flip_class_init (GstVideoFlipClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
GST_DEBUG_CATEGORY_INIT (video_flip_debug, "videoflip", 0, "videoflip");
GST_TYPE_VIDEO_FLIP_METHOD, PROP_METHOD_DEFAULT,
GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Video flipper",
+ "Filter/Effect/Video",
+ "Flips and rotates video", "David Schleef <ds@schleef.org>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_flip_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_flip_src_template));
+
trans_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_video_flip_transform_caps);
- trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_flip_set_caps);
- trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_video_flip_get_unit_size);
- trans_class->transform = GST_DEBUG_FUNCPTR (gst_video_flip_transform);
trans_class->before_transform =
GST_DEBUG_FUNCPTR (gst_video_flip_before_transform);
trans_class->src_event = GST_DEBUG_FUNCPTR (gst_video_flip_src_event);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_flip_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_video_flip_transform_frame);
}
static void
-gst_video_flip_init (GstVideoFlip * videoflip, GstVideoFlipClass * klass)
+gst_video_flip_init (GstVideoFlip * videoflip)
{
videoflip->method = PROP_METHOD_DEFAULT;
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (videoflip), TRUE);
*/
struct _GstVideoFlip {
GstVideoFilter videofilter;
-
+
/* < private > */
- GstVideoFormat format;
- gint from_width, from_height;
- gint to_width, to_height;
-
GstVideoFlipMethod method;
- void (*process) (GstVideoFlip *videoflip, guint8 *dest, const guint8 *src);
+ void (*process) (GstVideoFlip *videoflip, GstVideoFrame *dest, const GstVideoFrame *src);
};
struct _GstVideoFlipClass {
#endif
#include <gst/gst.h>
-#include <gst/controller/gstcontroller.h>
#include "gstgamma.h"
#include "gstvideoflip.h"
static gboolean
plugin_init (GstPlugin * plugin)
{
- gst_controller_init (NULL, NULL);
-
return (gst_element_register (plugin, "gamma", GST_RANK_NONE, GST_TYPE_GAMMA)
&& gst_element_register (plugin, "videobalance", GST_RANK_NONE,
GST_TYPE_VIDEO_BALANCE)
include $(top_srcdir)/common/orc.mak
libgstvideomixer_la_SOURCES = \
- videomixer.c \
blend.c \
videomixer2.c
nodist_libgstvideomixer_la_SOURCES = $(ORC_NODIST_SOURCES)
libgstvideomixer_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) \
- $(GST_BASE_CFLAGS) $(GST_CONTROLLER_CFLAGS) $(GST_CFLAGS) $(ORC_CFLAGS)
+ $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(ORC_CFLAGS)
libgstvideomixer_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
-lgstvideo-@GST_MAJORMINOR@ \
- $(GST_BASE_LIBS) $(GST_CONTROLLER_LIBS) $(GST_LIBS) $(ORC_LIBS)
+ $(GST_BASE_LIBS) $(GST_LIBS) $(ORC_LIBS)
libgstvideomixer_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvideomixer_la_LIBTOOLFLAGS = --tag=disable-static
# headers we need but don't want installed
noinst_HEADERS = \
- videomixer.h \
- videomixerpad.h \
blend.h \
videomixer2.h \
videomixer2pad.h
/* A32 is for AYUV, ARGB and BGRA */
#define BLEND_A32(name, method, LOOP) \
static void \
-method##_ ##name (const guint8 * src, gint xpos, gint ypos, \
- gint src_width, gint src_height, gdouble src_alpha, \
- guint8 * dest, gint dest_width, gint dest_height) \
+method##_ ##name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
+ gdouble src_alpha, GstVideoFrame * destframe) \
{ \
guint s_alpha; \
gint src_stride, dest_stride; \
- \
- src_stride = src_width * 4; \
- dest_stride = dest_width * 4; \
+ gint dest_width, dest_height; \
+ guint8 *src, *dest; \
+ gint src_width, src_height; \
+ \
+ src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+ src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
+ src = GST_VIDEO_FRAME_PLANE_DATA (srcframe, 0); \
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 0); \
+ dest = GST_VIDEO_FRAME_PLANE_DATA (destframe, 0); \
+ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 0); \
+ dest_width = GST_VIDEO_FRAME_COMP_WIDTH (destframe, 0); \
+ dest_height = GST_VIDEO_FRAME_COMP_HEIGHT (destframe, 0); \
\
s_alpha = CLAMP ((gint) (src_alpha * 256), 0, 256); \
\
#define A32_CHECKER_C(name, RGB, A, C1, C2, C3) \
static void \
-fill_checker_##name##_c (guint8 * dest, gint width, gint height) \
+fill_checker_##name##_c (GstVideoFrame * frame) \
{ \
gint i, j; \
gint val; \
static const gint tab[] = { 80, 160, 80, 160 }; \
+ gint width, height; \
+ guint8 *dest; \
+ \
+ dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0); \
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0); \
\
if (!RGB) { \
for (i = 0; i < height; i++) { \
#define A32_COLOR(name, RGB, A, C1, C2, C3) \
static void \
-fill_color_##name (guint8 * dest, gint width, gint height, gint Y, gint U, gint V) \
+fill_color_##name (GstVideoFrame * frame, gint Y, gint U, gint V) \
{ \
gint c1, c2, c3; \
guint32 val; \
+ gint width, height; \
+ guint8 *dest; \
+ \
+ dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0); \
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0); \
\
if (RGB) { \
c1 = YUV_TO_R (Y, U, V); \
} \
\
static void \
-blend_##format_name (const guint8 * src, gint xpos, gint ypos, \
- gint src_width, gint src_height, gdouble src_alpha, \
- guint8 * dest, gint dest_width, gint dest_height) \
+blend_##format_name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
+ gdouble src_alpha, GstVideoFrame * destframe) \
{ \
const guint8 *b_src; \
guint8 *b_dest; \
- gint b_src_width = src_width; \
- gint b_src_height = src_height; \
+ gint b_src_width; \
+ gint b_src_height; \
gint xoffset = 0; \
gint yoffset = 0; \
gint src_comp_rowstride, dest_comp_rowstride; \
gint src_comp_width; \
gint comp_ypos, comp_xpos; \
gint comp_yoffset, comp_xoffset; \
+ gint dest_width, dest_height; \
+ const GstVideoFormatInfo *info; \
+ gint src_width, src_height; \
+ \
+ src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+ src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
+ \
+ info = srcframe->info.finfo; \
+ dest_width = GST_VIDEO_FRAME_WIDTH (destframe); \
+ dest_height = GST_VIDEO_FRAME_WIDTH (destframe); \
\
xpos = x_round (xpos); \
ypos = y_round (ypos); \
\
+ b_src_width = src_width; \
+ b_src_height = src_height; \
+ \
/* adjust src pointers for negative sizes */ \
if (xpos < 0) { \
xoffset = -xpos; \
} \
\
/* First mix Y, then U, then V */ \
- b_src = src + gst_video_format_get_component_offset (format_enum, 0, src_width, src_height); \
- b_dest = dest + gst_video_format_get_component_offset (format_enum, 0, dest_width, dest_height); \
- src_comp_rowstride = gst_video_format_get_row_stride (format_enum, 0, src_width); \
- dest_comp_rowstride = gst_video_format_get_row_stride (format_enum, 0, dest_width); \
- src_comp_height = gst_video_format_get_component_height (format_enum, 0, b_src_height); \
- src_comp_width = gst_video_format_get_component_width (format_enum, 0, b_src_width); \
- comp_xpos = (xpos == 0) ? 0 : gst_video_format_get_component_width (format_enum, 0, xpos); \
- comp_ypos = (ypos == 0) ? 0 : gst_video_format_get_component_height (format_enum, 0, ypos); \
- comp_xoffset = (xoffset == 0) ? 0 : gst_video_format_get_component_width (format_enum, 0, xoffset); \
- comp_yoffset = (yoffset == 0) ? 0 : gst_video_format_get_component_height (format_enum, 0, yoffset); \
+ b_src = GST_VIDEO_FRAME_COMP_DATA (srcframe, 0); \
+ b_dest = GST_VIDEO_FRAME_COMP_DATA (destframe, 0); \
+ src_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 0); \
+ dest_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 0); \
+ src_comp_width = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info, 0, b_src_width); \
+ src_comp_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info, 0, b_src_height); \
+ comp_xpos = (xpos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 0, xpos); \
+ comp_ypos = (ypos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 0, ypos); \
+ comp_xoffset = (xoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 0, xoffset); \
+ comp_yoffset = (yoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 0, yoffset); \
_blend_##format_name (b_src + comp_xoffset + comp_yoffset * src_comp_rowstride, \
b_dest + comp_xpos + comp_ypos * dest_comp_rowstride, \
src_comp_rowstride, \
dest_comp_rowstride, src_comp_width, src_comp_height, \
src_alpha); \
\
- b_src = src + gst_video_format_get_component_offset (format_enum, 1, src_width, src_height); \
- b_dest = dest + gst_video_format_get_component_offset (format_enum, 1, dest_width, dest_height); \
- src_comp_rowstride = gst_video_format_get_row_stride (format_enum, 1, src_width); \
- dest_comp_rowstride = gst_video_format_get_row_stride (format_enum, 1, dest_width); \
- src_comp_height = gst_video_format_get_component_height (format_enum, 1, b_src_height); \
- src_comp_width = gst_video_format_get_component_width (format_enum, 1, b_src_width); \
- comp_xpos = (xpos == 0) ? 0 : gst_video_format_get_component_width (format_enum, 1, xpos); \
- comp_ypos = (ypos == 0) ? 0 : gst_video_format_get_component_height (format_enum, 1, ypos); \
- comp_xoffset = (xoffset == 0) ? 0 : gst_video_format_get_component_width (format_enum, 1, xoffset); \
- comp_yoffset = (yoffset == 0) ? 0 : gst_video_format_get_component_height (format_enum, 1, yoffset); \
+ b_src = GST_VIDEO_FRAME_COMP_DATA (srcframe, 1); \
+ b_dest = GST_VIDEO_FRAME_COMP_DATA (destframe, 1); \
+ src_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 1); \
+ dest_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 1); \
+ src_comp_width = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info, 1, b_src_width); \
+ src_comp_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info, 1, b_src_height); \
+ comp_xpos = (xpos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 1, xpos); \
+ comp_ypos = (ypos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 1, ypos); \
+ comp_xoffset = (xoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 1, xoffset); \
+ comp_yoffset = (yoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 1, yoffset); \
_blend_##format_name (b_src + comp_xoffset + comp_yoffset * src_comp_rowstride, \
b_dest + comp_xpos + comp_ypos * dest_comp_rowstride, \
src_comp_rowstride, \
dest_comp_rowstride, src_comp_width, src_comp_height, \
src_alpha); \
\
- b_src = src + gst_video_format_get_component_offset (format_enum, 2, src_width, src_height); \
- b_dest = dest + gst_video_format_get_component_offset (format_enum, 2, dest_width, dest_height); \
- src_comp_rowstride = gst_video_format_get_row_stride (format_enum, 2, src_width); \
- dest_comp_rowstride = gst_video_format_get_row_stride (format_enum, 2, dest_width); \
- src_comp_height = gst_video_format_get_component_height (format_enum, 2, b_src_height); \
- src_comp_width = gst_video_format_get_component_width (format_enum, 2, b_src_width); \
- comp_xpos = (xpos == 0) ? 0 : gst_video_format_get_component_width (format_enum, 2, xpos); \
- comp_ypos = (ypos == 0) ? 0 : gst_video_format_get_component_height (format_enum, 2, ypos); \
- comp_xoffset = (xoffset == 0) ? 0 : gst_video_format_get_component_width (format_enum, 2, xoffset); \
- comp_yoffset = (yoffset == 0) ? 0 : gst_video_format_get_component_height (format_enum, 2, yoffset); \
+ b_src = GST_VIDEO_FRAME_COMP_DATA (srcframe, 2); \
+ b_dest = GST_VIDEO_FRAME_COMP_DATA (destframe, 2); \
+ src_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 2); \
+ dest_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 2); \
+ src_comp_width = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info, 2, b_src_width); \
+ src_comp_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info, 2, b_src_height); \
+ comp_xpos = (xpos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 2, xpos); \
+ comp_ypos = (ypos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 2, ypos); \
+ comp_xoffset = (xoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 2, xoffset); \
+ comp_yoffset = (yoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 2, yoffset); \
_blend_##format_name (b_src + comp_xoffset + comp_yoffset * src_comp_rowstride, \
b_dest + comp_xpos + comp_ypos * dest_comp_rowstride, \
src_comp_rowstride, \
#define PLANAR_YUV_FILL_CHECKER(format_name, format_enum, MEMSET) \
static void \
-fill_checker_##format_name (guint8 * dest, gint width, gint height) \
+fill_checker_##format_name (GstVideoFrame * frame) \
{ \
gint i, j; \
static const int tab[] = { 80, 160, 80, 160 }; \
gint comp_width, comp_height; \
gint rowstride; \
\
- p = dest + gst_video_format_get_component_offset (format_enum, 0, width, height); \
- comp_width = gst_video_format_get_component_width (format_enum, 0, width); \
- comp_height = gst_video_format_get_component_height (format_enum, 0, height); \
- rowstride = gst_video_format_get_row_stride (format_enum, 0, width); \
+ p = GST_VIDEO_FRAME_COMP_DATA (frame, 0); \
+ comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0); \
+ comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0); \
+ rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
\
for (i = 0; i < comp_height; i++) { \
for (j = 0; j < comp_width; j++) { \
p += rowstride - comp_width; \
} \
\
- p = dest + gst_video_format_get_component_offset (format_enum, 1, width, height); \
- comp_width = gst_video_format_get_component_width (format_enum, 1, width); \
- comp_height = gst_video_format_get_component_height (format_enum, 1, height); \
- rowstride = gst_video_format_get_row_stride (format_enum, 1, width); \
+ p = GST_VIDEO_FRAME_COMP_DATA (frame, 1); \
+ comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1); \
+ comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1); \
+ rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1); \
\
for (i = 0; i < comp_height; i++) { \
MEMSET (p, 0x80, comp_width); \
p += rowstride; \
} \
\
- p = dest + gst_video_format_get_component_offset (format_enum, 2, width, height); \
- comp_width = gst_video_format_get_component_width (format_enum, 2, width); \
- comp_height = gst_video_format_get_component_height (format_enum, 2, height); \
- rowstride = gst_video_format_get_row_stride (format_enum, 2, width); \
+ p = GST_VIDEO_FRAME_COMP_DATA (frame, 2); \
+ comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 2); \
+ comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 2); \
+ rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 2); \
\
for (i = 0; i < comp_height; i++) { \
MEMSET (p, 0x80, comp_width); \
#define PLANAR_YUV_FILL_COLOR(format_name,format_enum,MEMSET) \
static void \
-fill_color_##format_name (guint8 * dest, gint width, gint height, \
+fill_color_##format_name (GstVideoFrame * frame, \
gint colY, gint colU, gint colV) \
{ \
guint8 *p; \
gint rowstride; \
gint i; \
\
- p = dest + gst_video_format_get_component_offset (format_enum, 0, width, height); \
- comp_width = gst_video_format_get_component_width (format_enum, 0, width); \
- comp_height = gst_video_format_get_component_height (format_enum, 0, height); \
- rowstride = gst_video_format_get_row_stride (format_enum, 0, width); \
+ p = GST_VIDEO_FRAME_COMP_DATA (frame, 0); \
+ comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0); \
+ comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0); \
+ rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
\
for (i = 0; i < comp_height; i++) { \
MEMSET (p, colY, comp_width); \
p += rowstride; \
} \
\
- p = dest + gst_video_format_get_component_offset (format_enum, 1, width, height); \
- comp_width = gst_video_format_get_component_width (format_enum, 1, width); \
- comp_height = gst_video_format_get_component_height (format_enum, 1, height); \
- rowstride = gst_video_format_get_row_stride (format_enum, 1, width); \
+ p = GST_VIDEO_FRAME_COMP_DATA (frame, 1); \
+ comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1); \
+ comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1); \
+ rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1); \
\
for (i = 0; i < comp_height; i++) { \
MEMSET (p, colU, comp_width); \
p += rowstride; \
} \
\
- p = dest + gst_video_format_get_component_offset (format_enum, 2, width, height); \
- comp_width = gst_video_format_get_component_width (format_enum, 2, width); \
- comp_height = gst_video_format_get_component_height (format_enum, 2, height); \
- rowstride = gst_video_format_get_row_stride (format_enum, 2, width); \
+ p = GST_VIDEO_FRAME_COMP_DATA (frame, 2); \
+ comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 2); \
+ comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 2); \
+ rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 2); \
\
for (i = 0; i < comp_height; i++) { \
MEMSET (p, colV, comp_width); \
#define RGB_BLEND(name, bpp, MEMCPY, BLENDLOOP) \
static void \
-blend_##name (const guint8 * src, gint xpos, gint ypos, \
- gint src_width, gint src_height, gdouble src_alpha, \
- guint8 * dest, gint dest_width, gint dest_height) \
+blend_##name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
+ gdouble src_alpha, GstVideoFrame * destframe) \
{ \
gint b_alpha; \
gint i; \
gint src_stride, dest_stride; \
+ gint dest_width, dest_height; \
+ guint8 *dest, *src; \
+ gint src_width, src_height; \
+ \
+ src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+ src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
+ \
+ src = GST_VIDEO_FRAME_PLANE_DATA (srcframe, 0); \
+ dest = GST_VIDEO_FRAME_PLANE_DATA (destframe, 0); \
\
- src_stride = GST_ROUND_UP_4 (src_width * bpp); \
- dest_stride = GST_ROUND_UP_4 (dest_width * bpp); \
+ dest_width = GST_VIDEO_FRAME_WIDTH (destframe); \
+ dest_height = GST_VIDEO_FRAME_HEIGHT (destframe); \
+ \
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 0); \
+ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 0); \
\
b_alpha = CLAMP ((gint) (src_alpha * 256), 0, 256); \
\
#define RGB_FILL_CHECKER_C(name, bpp, r, g, b) \
static void \
-fill_checker_##name##_c (guint8 * dest, gint width, gint height) \
+fill_checker_##name##_c (GstVideoFrame * frame) \
{ \
gint i, j; \
static const int tab[] = { 80, 160, 80, 160 }; \
- gint dest_add = GST_ROUND_UP_4 (width * bpp) - width * bpp; \
+ gint stride, dest_add, width, height; \
+ guint8 *dest; \
+ \
+ width = GST_VIDEO_FRAME_WIDTH (frame); \
+ height = GST_VIDEO_FRAME_HEIGHT (frame); \
+ dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
+ stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
+ dest_add = stride - width * bpp; \
\
for (i = 0; i < height; i++) { \
for (j = 0; j < width; j++) { \
#define RGB_FILL_COLOR(name, bpp, MEMSET_RGB) \
static void \
-fill_color_##name (guint8 * dest, gint width, gint height, \
+fill_color_##name (GstVideoFrame * frame, \
gint colY, gint colU, gint colV) \
{ \
gint red, green, blue; \
gint i; \
- gint dest_stride = GST_ROUND_UP_4 (width * bpp); \
+ gint dest_stride; \
+ gint width, height; \
+ guint8 *dest; \
+ \
+ width = GST_VIDEO_FRAME_WIDTH (frame); \
+ height = GST_VIDEO_FRAME_HEIGHT (frame); \
+ dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
+ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
\
red = YUV_TO_R (colY, colU, colV); \
green = YUV_TO_G (colY, colU, colV); \
#define PACKED_422_BLEND(name, MEMCPY, BLENDLOOP) \
static void \
-blend_##name (const guint8 * src, gint xpos, gint ypos, \
- gint src_width, gint src_height, gdouble src_alpha, \
- guint8 * dest, gint dest_width, gint dest_height) \
+blend_##name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
+ gdouble src_alpha, GstVideoFrame * destframe) \
{ \
gint b_alpha; \
gint i; \
gint src_stride, dest_stride; \
+ gint dest_width, dest_height; \
+ guint8 *src, *dest; \
+ gint src_width, src_height; \
+ \
+ src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+ src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
+ \
+ dest_width = GST_VIDEO_FRAME_WIDTH (destframe); \
+ dest_height = GST_VIDEO_FRAME_HEIGHT (destframe); \
+ \
+ src = GST_VIDEO_FRAME_PLANE_DATA (srcframe, 0); \
+ dest = GST_VIDEO_FRAME_PLANE_DATA (destframe, 0); \
\
- src_stride = GST_ROUND_UP_4 (src_width * 2); \
- dest_stride = GST_ROUND_UP_4 (dest_width * 2); \
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 0); \
+ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 0); \
\
b_alpha = CLAMP ((gint) (src_alpha * 256), 0, 256); \
\
#define PACKED_422_FILL_CHECKER_C(name, Y1, U, Y2, V) \
static void \
-fill_checker_##name##_c (guint8 * dest, gint width, gint height) \
+fill_checker_##name##_c (GstVideoFrame * frame) \
{ \
gint i, j; \
static const int tab[] = { 80, 160, 80, 160 }; \
gint dest_add; \
+ gint width, height; \
+ guint8 *dest; \
\
+ width = GST_VIDEO_FRAME_WIDTH (frame); \
width = GST_ROUND_UP_2 (width); \
- dest_add = GST_ROUND_UP_4 (width * 2) - width * 2; \
+ height = GST_VIDEO_FRAME_HEIGHT (frame); \
+ dest = GST_VIDEO_FRAME_COMP_DATA (frame, 0); \
+ dest_add = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) - width * 2; \
width /= 2; \
\
for (i = 0; i < height; i++) { \
#define PACKED_422_FILL_COLOR(name, Y1, U, Y2, V) \
static void \
-fill_color_##name (guint8 * dest, gint width, gint height, \
+fill_color_##name (GstVideoFrame * frame, \
gint colY, gint colU, gint colV) \
{ \
gint i; \
gint dest_stride; \
guint32 val; \
+ gint width, height; \
+ guint8 *dest; \
\
+ width = GST_VIDEO_FRAME_WIDTH (frame); \
width = GST_ROUND_UP_2 (width); \
- dest_stride = GST_ROUND_UP_4 (width * 2); \
+ height = GST_VIDEO_FRAME_HEIGHT (frame); \
+ dest = GST_VIDEO_FRAME_COMP_DATA (frame, 0); \
+ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
width /= 2; \
\
val = GUINT32_FROM_BE ((colY << Y1) | (colY << Y2) | (colU << U) | (colV << V)); \
#define __BLEND_H__
#include <gst/gst.h>
+#include <gst/video/video.h>
-typedef void (*BlendFunction) (const guint8 * src, gint xpos, gint ypos, gint src_width, gint src_height, gdouble src_alpha, guint8 * dest, gint dest_width, gint dest_height);
-typedef void (*FillCheckerFunction) (guint8 * dest, gint width, gint height);
-typedef void (*FillColorFunction) (guint8 * dest, gint width, gint height, gint c1, gint c2, gint c3);
+typedef void (*BlendFunction) (GstVideoFrame *srcframe, gint xpos, gint ypos, gdouble src_alpha, GstVideoFrame * destframe);
+typedef void (*FillCheckerFunction) (GstVideoFrame * frame);
+typedef void (*FillColorFunction) (GstVideoFrame * frame, gint c1, gint c2, gint c3);
extern BlendFunction gst_video_mixer_blend_argb;
extern BlendFunction gst_video_mixer_blend_bgra;
+++ /dev/null
-/* Generic video mixer plugin
- * Copyright (C) 2004 Wim Taymans <wim@fluendo.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-/**
- * SECTION:element-videomixer
- *
- * Videomixer can accept AYUV, ARGB and BGRA video streams. For each of the requested
- * sink pads it will compare the incoming geometry and framerate to define the
- * output parameters. Indeed output video frames will have the geometry of the
- * biggest incoming video stream and the framerate of the fastest incoming one.
- *
- * All sink pads must be either AYUV, ARGB or BGRA, but a mixture of them is not
- * supported. The src pad will have the same colorspace as the sinks.
- * No colorspace conversion is done.
- *
- * Individual parameters for each input stream can be configured on the
- * #GstVideoMixerPad.
- *
- * <refsect2>
- * <title>Sample pipelines</title>
- * |[
- * gst-launch-0.10 \
- * videotestsrc pattern=1 ! \
- * video/x-raw-yuv,format=\(fourcc\)AYUV,framerate=\(fraction\)10/1,width=100,height=100 ! \
- * videobox border-alpha=0 top=-70 bottom=-70 right=-220 ! \
- * videomixer name=mix sink_0::alpha=0.7 sink_1::alpha=0.5 ! \
- * ffmpegcolorspace ! xvimagesink \
- * videotestsrc ! \
- * video/x-raw-yuv,format=\(fourcc\)AYUV,framerate=\(fraction\)5/1,width=320,height=240 ! mix.
- * ]| A pipeline to demonstrate videomixer used together with videobox.
- * This should show a 320x240 pixels video test source with some transparency
- * showing the background checker pattern. Another video test source with just
- * the snow pattern of 100x100 pixels is overlayed on top of the first one on
- * the left vertically centered with a small transparency showing the first
- * video test source behind and the checker pattern under it. Note that the
- * framerate of the output video is 10 frames per second.
- * |[
- * gst-launch videotestsrc pattern=1 ! \
- * video/x-raw-rgb, framerate=\(fraction\)10/1, width=100, height=100 ! \
- * videomixer name=mix ! ffmpegcolorspace ! ximagesink \
- * videotestsrc ! \
- * video/x-raw-rgb, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
- * ]| A pipeline to demostrate bgra mixing. (This does not demonstrate alpha blending).
- * |[
- * gst-launch videotestsrc pattern=1 ! \
- * video/x-raw-yuv,format =\(fourcc\)I420, framerate=\(fraction\)10/1, width=100, height=100 ! \
- * videomixer name=mix ! ffmpegcolorspace ! ximagesink \
- * videotestsrc ! \
- * video/x-raw-yuv,format=\(fourcc\)I420, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
- * ]| A pipeline to test I420
- * |[
- * gst-launch videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=\(fraction\)10/1, width=200, height=150 ! videomixer name=mix sink_1::xpos=20 sink_1::ypos=20 sink_1::alpha=0.5 ! ffmpegcolorspace ! xvimagesink videotestsrc ! video/x-raw-yuv, framerate=\(fraction\)10/1, width=640, height=360 ! mix.
- * ]| Set position and alpha on the mixer using #GstVideoMixerPad properties.
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <gst/gst.h>
-#include <gst/base/gstcollectpads.h>
-#include <gst/controller/gstcontroller.h>
-#include <gst/video/video.h>
-
-#ifdef HAVE_STDLIB_H
-#include <stdlib.h>
-#endif
-#ifdef HAVE_STRING_H
-#include <string.h>
-#endif
-
-#include "videomixer.h"
-#include "videomixer2.h"
-
-#include "gst/glib-compat-private.h"
-
-#ifdef DISABLE_ORC
-#define orc_memset memset
-#else
-#include <orc/orcfunctions.h>
-#endif
-
-GST_DEBUG_CATEGORY_STATIC (gst_videomixer_debug);
-#define GST_CAT_DEFAULT gst_videomixer_debug
-
-#define GST_VIDEO_MIXER_GET_STATE_LOCK(mix) \
- (GST_VIDEO_MIXER(mix)->state_lock)
-#define GST_VIDEO_MIXER_STATE_LOCK(mix) \
- (g_mutex_lock(GST_VIDEO_MIXER_GET_STATE_LOCK (mix)))
-#define GST_VIDEO_MIXER_STATE_UNLOCK(mix) \
- (g_mutex_unlock(GST_VIDEO_MIXER_GET_STATE_LOCK (mix)))
-
-static GType gst_videomixer_get_type (void);
-
-static void gst_videomixer_pad_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-static void gst_videomixer_pad_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-
-static gboolean gst_videomixer_src_event (GstPad * pad, GstEvent * event);
-static gboolean gst_videomixer_sink_event (GstPad * pad, GstEvent * event);
-
-static void gst_videomixer_sort_pads (GstVideoMixer * mix);
-
-#define DEFAULT_PAD_ZORDER 0
-#define DEFAULT_PAD_XPOS 0
-#define DEFAULT_PAD_YPOS 0
-#define DEFAULT_PAD_ALPHA 1.0
-enum
-{
- PROP_PAD_0,
- PROP_PAD_ZORDER,
- PROP_PAD_XPOS,
- PROP_PAD_YPOS,
- PROP_PAD_ALPHA
-};
-
-GType gst_videomixer_pad_get_type (void);
-G_DEFINE_TYPE (GstVideoMixerPad, gst_videomixer_pad, GST_TYPE_PAD);
-
-static void
-gst_videomixer_pad_class_init (GstVideoMixerPadClass * klass)
-{
- GObjectClass *gobject_class = (GObjectClass *) klass;
-
- gobject_class->set_property = gst_videomixer_pad_set_property;
- gobject_class->get_property = gst_videomixer_pad_get_property;
-
- g_object_class_install_property (gobject_class, PROP_PAD_ZORDER,
- g_param_spec_uint ("zorder", "Z-Order", "Z Order of the picture",
- 0, 10000, DEFAULT_PAD_ZORDER,
- G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_PAD_XPOS,
- g_param_spec_int ("xpos", "X Position", "X Position of the picture",
- G_MININT, G_MAXINT, DEFAULT_PAD_XPOS,
- G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_PAD_YPOS,
- g_param_spec_int ("ypos", "Y Position", "Y Position of the picture",
- G_MININT, G_MAXINT, DEFAULT_PAD_YPOS,
- G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_PAD_ALPHA,
- g_param_spec_double ("alpha", "Alpha", "Alpha of the picture", 0.0, 1.0,
- DEFAULT_PAD_ALPHA,
- G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
-}
-
-static void
-gst_videomixer_pad_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec)
-{
- GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (object);
-
- switch (prop_id) {
- case PROP_PAD_ZORDER:
- g_value_set_uint (value, pad->zorder);
- break;
- case PROP_PAD_XPOS:
- g_value_set_int (value, pad->xpos);
- break;
- case PROP_PAD_YPOS:
- g_value_set_int (value, pad->ypos);
- break;
- case PROP_PAD_ALPHA:
- g_value_set_double (value, pad->alpha);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static void
-gst_videomixer_pad_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (object);
- GstVideoMixer *mix = GST_VIDEO_MIXER (gst_pad_get_parent (GST_PAD (pad)));
-
- switch (prop_id) {
- case PROP_PAD_ZORDER:
- GST_VIDEO_MIXER_STATE_LOCK (mix);
- pad->zorder = g_value_get_uint (value);
- gst_videomixer_sort_pads (mix);
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
- break;
- case PROP_PAD_XPOS:
- pad->xpos = g_value_get_int (value);
- break;
- case PROP_PAD_YPOS:
- pad->ypos = g_value_get_int (value);
- break;
- case PROP_PAD_ALPHA:
- pad->alpha = g_value_get_double (value);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-
- gst_object_unref (mix);
-}
-
-static void
-gst_videomixer_update_qos (GstVideoMixer * mix, gdouble proportion,
- GstClockTimeDiff diff, GstClockTime timestamp)
-{
- GST_DEBUG_OBJECT (mix,
- "Updating QoS: proportion %lf, diff %s%" GST_TIME_FORMAT ", timestamp %"
- GST_TIME_FORMAT, proportion, (diff < 0) ? "-" : "",
- GST_TIME_ARGS (ABS (diff)), GST_TIME_ARGS (timestamp));
-
- GST_OBJECT_LOCK (mix);
- mix->proportion = proportion;
- if (G_LIKELY (timestamp != GST_CLOCK_TIME_NONE)) {
- if (G_UNLIKELY (diff > 0))
- mix->earliest_time =
- timestamp + 2 * diff + gst_util_uint64_scale_int (GST_SECOND,
- mix->fps_d, mix->fps_n);
- else
- mix->earliest_time = timestamp + diff;
- } else {
- mix->earliest_time = GST_CLOCK_TIME_NONE;
- }
- GST_OBJECT_UNLOCK (mix);
-}
-
-static void
-gst_videomixer_reset_qos (GstVideoMixer * mix)
-{
- gst_videomixer_update_qos (mix, 0.5, 0, GST_CLOCK_TIME_NONE);
-}
-
-static void
-gst_videomixer_read_qos (GstVideoMixer * mix, gdouble * proportion,
- GstClockTime * time)
-{
- GST_OBJECT_LOCK (mix);
- *proportion = mix->proportion;
- *time = mix->earliest_time;
- GST_OBJECT_UNLOCK (mix);
-}
-
-/* Perform qos calculations before processing the next frame. Returns TRUE if
- * the frame should be processed, FALSE if the frame can be dropped entirely */
-static gboolean
-gst_videomixer_do_qos (GstVideoMixer * mix, GstClockTime timestamp)
-{
- GstClockTime qostime, earliest_time;
- gdouble proportion;
-
- /* no timestamp, can't do QoS => process frame */
- if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
- GST_LOG_OBJECT (mix, "invalid timestamp, can't do QoS, process frame");
- return TRUE;
- }
-
- /* get latest QoS observation values */
- gst_videomixer_read_qos (mix, &proportion, &earliest_time);
-
- /* skip qos if we have no observation (yet) => process frame */
- if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
- GST_LOG_OBJECT (mix, "no observation yet, process frame");
- return TRUE;
- }
-
- /* qos is done on running time */
- qostime =
- gst_segment_to_running_time (&mix->segment, GST_FORMAT_TIME, timestamp);
-
- /* see how our next timestamp relates to the latest qos timestamp */
- GST_LOG_OBJECT (mix, "qostime %" GST_TIME_FORMAT ", earliest %"
- GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
-
- if (qostime != GST_CLOCK_TIME_NONE && qostime <= earliest_time) {
- GST_DEBUG_OBJECT (mix, "we are late, drop frame");
- return FALSE;
- }
-
- GST_LOG_OBJECT (mix, "process frame");
- return TRUE;
-}
-
-static void
-gst_videomixer_set_master_geometry (GstVideoMixer * mix)
-{
- GSList *walk;
- gint width = 0, height = 0, fps_n = 0, fps_d = 0, par_n = 0, par_d = 0;
- GstVideoMixerPad *master = NULL;
-
- walk = mix->sinkpads;
- while (walk) {
- GstVideoMixerPad *mixpad = GST_VIDEO_MIXER_PAD (walk->data);
-
- walk = g_slist_next (walk);
-
- /* Biggest input geometry will be our output geometry */
- width = MAX (width, mixpad->in_width);
- height = MAX (height, mixpad->in_height);
-
- /* If mix framerate < mixpad framerate, using fractions */
- GST_DEBUG_OBJECT (mixpad, "comparing framerate %d/%d to mixpad's %d/%d",
- fps_n, fps_d, mixpad->fps_n, mixpad->fps_d);
- if ((!fps_n && !fps_d) ||
- ((gint64) fps_n * mixpad->fps_d < (gint64) mixpad->fps_n * fps_d)) {
- fps_n = mixpad->fps_n;
- fps_d = mixpad->fps_d;
- par_n = mixpad->par_n;
- par_d = mixpad->par_d;
- GST_DEBUG_OBJECT (mixpad, "becomes the master pad");
- master = mixpad;
- }
- }
-
- /* set results */
- if (mix->master != master || mix->in_width != width
- || mix->in_height != height || mix->fps_n != fps_n
- || mix->fps_d != fps_d || mix->par_n != par_n || mix->par_d != par_d) {
- mix->setcaps = TRUE;
- mix->sendseg = TRUE;
- gst_videomixer_reset_qos (mix);
- mix->master = master;
- mix->in_width = width;
- mix->in_height = height;
- mix->fps_n = fps_n;
- mix->fps_d = fps_d;
- mix->par_n = par_n;
- mix->par_d = par_d;
- }
-}
-
-static gboolean
-gst_videomixer_pad_sink_setcaps (GstPad * pad, GstCaps * vscaps)
-{
- GstVideoMixer *mix;
- GstVideoMixerPad *mixpad;
- GstStructure *structure;
- gint in_width, in_height;
- gboolean ret = FALSE;
- const GValue *framerate, *par;
-
- GST_INFO_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, vscaps);
-
- mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
- mixpad = GST_VIDEO_MIXER_PAD (pad);
-
- if (!mixpad)
- goto beach;
-
- structure = gst_caps_get_structure (vscaps, 0);
-
- if (!gst_structure_get_int (structure, "width", &in_width)
- || !gst_structure_get_int (structure, "height", &in_height)
- || (framerate = gst_structure_get_value (structure, "framerate")) == NULL)
- goto beach;
- par = gst_structure_get_value (structure, "pixel-aspect-ratio");
-
- GST_VIDEO_MIXER_STATE_LOCK (mix);
- mixpad->fps_n = gst_value_get_fraction_numerator (framerate);
- mixpad->fps_d = gst_value_get_fraction_denominator (framerate);
- if (par) {
- mixpad->par_n = gst_value_get_fraction_numerator (par);
- mixpad->par_d = gst_value_get_fraction_denominator (par);
- } else {
- mixpad->par_n = mixpad->par_d = 1;
- }
-
- mixpad->in_width = in_width;
- mixpad->in_height = in_height;
-
- gst_videomixer_set_master_geometry (mix);
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
-
- ret = TRUE;
-
-beach:
- gst_object_unref (mix);
-
- return ret;
-}
-
-static GstCaps *
-gst_videomixer_pad_sink_getcaps (GstPad * pad)
-{
- GstVideoMixer *mix;
- GstVideoMixerPad *mixpad;
- GstCaps *res = NULL;
- GstCaps *mastercaps;
- GstStructure *st;
-
- mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
- mixpad = GST_VIDEO_MIXER_PAD (pad);
-
- if (!mixpad)
- goto beach;
-
- /* Get downstream allowed caps */
- res = gst_pad_get_allowed_caps (mix->srcpad);
- if (G_UNLIKELY (res == NULL)) {
- res = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
- goto beach;
- }
-
- GST_VIDEO_MIXER_STATE_LOCK (mix);
-
- /* Return as-is if not other sinkpad set as master */
- if (mix->master == NULL) {
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
- goto beach;
- }
-
- mastercaps = gst_pad_get_fixed_caps_func (GST_PAD (mix->master));
-
- /* If master pad caps aren't negotiated yet, return downstream
- * allowed caps */
- if (!GST_CAPS_IS_SIMPLE (mastercaps)) {
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
- gst_caps_unref (mastercaps);
- goto beach;
- }
-
- gst_caps_unref (res);
- res = gst_caps_make_writable (mastercaps);
- st = gst_caps_get_structure (res, 0);
- gst_structure_set (st, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
- "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
- "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
- if (!gst_structure_has_field (st, "pixel-aspect-ratio"))
- gst_structure_set (st, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1, NULL);
-
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
-
-
-beach:
- GST_DEBUG_OBJECT (pad, "Returning %" GST_PTR_FORMAT, res);
-
- return res;
-}
-
-/*
-* We accept the caps if it has the same format as other sink pads in
-* the element.
-*/
-static gboolean
-gst_videomixer_pad_sink_acceptcaps (GstPad * pad, GstCaps * vscaps)
-{
- gboolean ret;
- GstVideoMixer *mix;
- GstCaps *acceptedCaps;
-
- mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
- GST_DEBUG_OBJECT (pad, "%" GST_PTR_FORMAT, vscaps);
- GST_VIDEO_MIXER_STATE_LOCK (mix);
-
- if (mix->master) {
- acceptedCaps = gst_pad_get_fixed_caps_func (GST_PAD (mix->master));
- acceptedCaps = gst_caps_make_writable (acceptedCaps);
- GST_LOG_OBJECT (pad, "master's caps %" GST_PTR_FORMAT, acceptedCaps);
- if (GST_CAPS_IS_SIMPLE (acceptedCaps)) {
- GstStructure *s;
- s = gst_caps_get_structure (acceptedCaps, 0);
- gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
- "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
- "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
- if (!gst_structure_has_field (s, "pixel-aspect-ratio"))
- gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
- NULL);
- }
- } else {
- acceptedCaps = gst_pad_get_fixed_caps_func (pad);
- }
-
- GST_INFO_OBJECT (pad, "vscaps: %" GST_PTR_FORMAT, vscaps);
- GST_INFO_OBJECT (pad, "acceptedCaps: %" GST_PTR_FORMAT, acceptedCaps);
-
- ret = gst_caps_can_intersect (vscaps, acceptedCaps);
- GST_INFO_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT, (ret ? "" : "not "),
- vscaps);
- gst_caps_unref (acceptedCaps);
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
- gst_object_unref (mix);
- return ret;
-}
-
-
-
-static void
-gst_videomixer_pad_init (GstVideoMixerPad * mixerpad)
-{
- /* setup some pad functions */
- gst_pad_set_setcaps_function (GST_PAD (mixerpad),
- gst_videomixer_pad_sink_setcaps);
- gst_pad_set_acceptcaps_function (GST_PAD (mixerpad),
- GST_DEBUG_FUNCPTR (gst_videomixer_pad_sink_acceptcaps));
- gst_pad_set_getcaps_function (GST_PAD (mixerpad),
- gst_videomixer_pad_sink_getcaps);
-
- mixerpad->zorder = DEFAULT_PAD_ZORDER;
- mixerpad->xpos = DEFAULT_PAD_XPOS;
- mixerpad->ypos = DEFAULT_PAD_YPOS;
- mixerpad->alpha = DEFAULT_PAD_ALPHA;
-}
-
-/* VideoMixer signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
-};
-
-#define DEFAULT_BACKGROUND VIDEO_MIXER_BACKGROUND_CHECKER
-enum
-{
- PROP_0,
- PROP_BACKGROUND
-};
-
-#define GST_TYPE_VIDEO_MIXER_BACKGROUND (gst_video_mixer_background_get_type())
-static GType
-gst_video_mixer_background_get_type (void)
-{
- static GType video_mixer_background_type = 0;
-
- static const GEnumValue video_mixer_background[] = {
- {VIDEO_MIXER_BACKGROUND_CHECKER, "Checker pattern", "checker"},
- {VIDEO_MIXER_BACKGROUND_BLACK, "Black", "black"},
- {VIDEO_MIXER_BACKGROUND_WHITE, "White", "white"},
- {VIDEO_MIXER_BACKGROUND_TRANSPARENT,
- "Transparent Background to enable further mixing", "transparent"},
- {0, NULL, NULL},
- };
-
- if (!video_mixer_background_type) {
- video_mixer_background_type =
- g_enum_register_static ("GstVideoMixerBackground",
- video_mixer_background);
- }
- return video_mixer_background_type;
-}
-
-static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_ABGR ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";" GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";" GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("Y41B") ";" GST_VIDEO_CAPS_RGB ";"
- GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
- GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx)
- );
-
-static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%d",
- GST_PAD_SINK,
- GST_PAD_REQUEST,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_ABGR ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";" GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";" GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("Y41B") ";" GST_VIDEO_CAPS_RGB ";"
- GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
- GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx)
- );
-
-static void gst_videomixer_finalize (GObject * object);
-
-static GstCaps *gst_videomixer_getcaps (GstPad * pad);
-static gboolean gst_videomixer_setcaps (GstPad * pad, GstCaps * caps);
-static gboolean gst_videomixer_query (GstPad * pad, GstQuery * query);
-
-static GstFlowReturn gst_videomixer_collected (GstCollectPads * pads,
- GstVideoMixer * mix);
-static GstPad *gst_videomixer_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
-static void gst_videomixer_release_pad (GstElement * element, GstPad * pad);
-
-static void gst_videomixer_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_videomixer_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-static GstStateChangeReturn gst_videomixer_change_state (GstElement * element,
- GstStateChange transition);
-
-/*static guint gst_videomixer_signals[LAST_SIGNAL] = { 0 }; */
-
-static void gst_videomixer_child_proxy_init (gpointer g_iface,
- gpointer iface_data);
-static void _do_init (GType object_type);
-
-GST_BOILERPLATE_FULL (GstVideoMixer, gst_videomixer, GstElement,
- GST_TYPE_ELEMENT, _do_init);
-
-static void
-_do_init (GType object_type)
-{
- static const GInterfaceInfo child_proxy_info = {
- (GInterfaceInitFunc) gst_videomixer_child_proxy_init,
- NULL,
- NULL
- };
-
- g_type_add_interface_static (object_type, GST_TYPE_CHILD_PROXY,
- &child_proxy_info);
- GST_INFO ("GstChildProxy interface registered");
-}
-
-static GstObject *
-gst_videomixer_child_proxy_get_child_by_index (GstChildProxy * child_proxy,
- guint index)
-{
- GstVideoMixer *mix = GST_VIDEO_MIXER (child_proxy);
- GstObject *obj;
-
- GST_VIDEO_MIXER_STATE_LOCK (mix);
- if ((obj = g_slist_nth_data (mix->sinkpads, index)))
- gst_object_ref (obj);
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
- return obj;
-}
-
-static guint
-gst_videomixer_child_proxy_get_children_count (GstChildProxy * child_proxy)
-{
- guint count = 0;
- GstVideoMixer *mix = GST_VIDEO_MIXER (child_proxy);
-
- GST_VIDEO_MIXER_STATE_LOCK (mix);
- count = mix->numpads;
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
- GST_INFO_OBJECT (mix, "Children Count: %d", count);
- return count;
-}
-
-static void
-gst_videomixer_child_proxy_init (gpointer g_iface, gpointer iface_data)
-{
- GstChildProxyInterface *iface = g_iface;
-
- GST_INFO ("intializing child proxy interface");
- iface->get_child_by_index = gst_videomixer_child_proxy_get_child_by_index;
- iface->get_children_count = gst_videomixer_child_proxy_get_children_count;
-}
-
-static void
-gst_videomixer_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
-
- gst_element_class_set_details_simple (element_class, "Video mixer",
- "Filter/Editor/Video",
- "Mix multiple video streams", "Wim Taymans <wim@fluendo.com>");
-}
-
-static void
-gst_videomixer_class_init (GstVideoMixerClass * klass)
-{
- GObjectClass *gobject_class = (GObjectClass *) klass;
- GstElementClass *gstelement_class = (GstElementClass *) klass;
-
- gobject_class->finalize = gst_videomixer_finalize;
-
- gobject_class->get_property = gst_videomixer_get_property;
- gobject_class->set_property = gst_videomixer_set_property;
-
- g_object_class_install_property (gobject_class, PROP_BACKGROUND,
- g_param_spec_enum ("background", "Background", "Background type",
- GST_TYPE_VIDEO_MIXER_BACKGROUND,
- DEFAULT_BACKGROUND, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
- gstelement_class->request_new_pad =
- GST_DEBUG_FUNCPTR (gst_videomixer_request_new_pad);
- gstelement_class->release_pad =
- GST_DEBUG_FUNCPTR (gst_videomixer_release_pad);
- gstelement_class->change_state =
- GST_DEBUG_FUNCPTR (gst_videomixer_change_state);
-
- /* Register the pad class */
- (void) (GST_TYPE_VIDEO_MIXER_PAD);
- /* Register the background enum */
- (void) (GST_TYPE_VIDEO_MIXER_BACKGROUND);
-}
-
-static void
-gst_videomixer_collect_free (GstVideoMixerCollect * mixcol)
-{
- if (mixcol->buffer) {
- gst_buffer_unref (mixcol->buffer);
- mixcol->buffer = NULL;
- }
-}
-
-static void
-gst_videomixer_reset (GstVideoMixer * mix)
-{
- GSList *walk;
-
- mix->in_width = 0;
- mix->in_height = 0;
- mix->out_width = 0;
- mix->out_height = 0;
- mix->fps_n = mix->fps_d = 0;
- mix->par_n = mix->par_d = 1;
- mix->setcaps = FALSE;
- mix->sendseg = FALSE;
-
- mix->segment_position = 0;
- gst_segment_init (&mix->segment, GST_FORMAT_TIME);
-
- gst_videomixer_reset_qos (mix);
-
- mix->fmt = GST_VIDEO_FORMAT_UNKNOWN;
-
- mix->last_ts = 0;
- mix->last_duration = -1;
-
- /* clean up collect data */
- walk = mix->collect->data;
- while (walk) {
- GstVideoMixerCollect *data = (GstVideoMixerCollect *) walk->data;
-
- gst_videomixer_collect_free (data);
- walk = g_slist_next (walk);
- }
-
- mix->next_sinkpad = 0;
- mix->flush_stop_pending = FALSE;
-}
-
-static void
-gst_videomixer_init (GstVideoMixer * mix, GstVideoMixerClass * g_class)
-{
- GstElementClass *klass = GST_ELEMENT_GET_CLASS (mix);
-
- mix->srcpad =
- gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
- "src"), "src");
- gst_pad_set_getcaps_function (GST_PAD (mix->srcpad),
- GST_DEBUG_FUNCPTR (gst_videomixer_getcaps));
- gst_pad_set_setcaps_function (GST_PAD (mix->srcpad),
- GST_DEBUG_FUNCPTR (gst_videomixer_setcaps));
- gst_pad_set_query_function (GST_PAD (mix->srcpad),
- GST_DEBUG_FUNCPTR (gst_videomixer_query));
- gst_pad_set_event_function (GST_PAD (mix->srcpad),
- GST_DEBUG_FUNCPTR (gst_videomixer_src_event));
- gst_element_add_pad (GST_ELEMENT (mix), mix->srcpad);
-
- mix->collect = gst_collect_pads_new ();
- mix->background = DEFAULT_BACKGROUND;
-
- gst_collect_pads_set_function (mix->collect,
- (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_videomixer_collected),
- mix);
-
- mix->state_lock = g_mutex_new ();
- /* initialize variables */
- gst_videomixer_reset (mix);
-}
-
-static void
-gst_videomixer_finalize (GObject * object)
-{
- GstVideoMixer *mix = GST_VIDEO_MIXER (object);
-
- gst_object_unref (mix->collect);
- g_mutex_free (mix->state_lock);
-
- G_OBJECT_CLASS (parent_class)->finalize (object);
-}
-
-static gboolean
-gst_videomixer_query_duration (GstVideoMixer * mix, GstQuery * query)
-{
- gint64 max;
- gboolean res;
- GstFormat format;
- GstIterator *it;
- gboolean done;
-
- /* parse format */
- gst_query_parse_duration (query, &format, NULL);
-
- max = -1;
- res = TRUE;
- done = FALSE;
-
- /* Take maximum of all durations */
- it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
- while (!done) {
- GstIteratorResult ires;
- gpointer item;
-
- ires = gst_iterator_next (it, &item);
- switch (ires) {
- case GST_ITERATOR_DONE:
- done = TRUE;
- break;
- case GST_ITERATOR_OK:
- {
- GstPad *pad = GST_PAD_CAST (item);
- gint64 duration;
-
- /* ask sink peer for duration */
- res &= gst_pad_query_peer_duration (pad, &format, &duration);
- /* take max from all valid return values */
- if (res) {
- /* valid unknown length, stop searching */
- if (duration == -1) {
- max = duration;
- done = TRUE;
- }
- /* else see if bigger than current max */
- else if (duration > max)
- max = duration;
- }
- gst_object_unref (pad);
- break;
- }
- case GST_ITERATOR_RESYNC:
- max = -1;
- res = TRUE;
- gst_iterator_resync (it);
- break;
- default:
- res = FALSE;
- done = TRUE;
- break;
- }
- }
- gst_iterator_free (it);
-
- if (res) {
- /* and store the max */
- GST_DEBUG_OBJECT (mix, "Total duration in format %s: %"
- GST_TIME_FORMAT, gst_format_get_name (format), GST_TIME_ARGS (max));
- gst_query_set_duration (query, format, max);
- }
-
- return res;
-}
-
-static gboolean
-gst_videomixer_query_latency (GstVideoMixer * mix, GstQuery * query)
-{
- GstClockTime min, max;
- gboolean live;
- gboolean res;
- GstIterator *it;
- gboolean done;
-
- res = TRUE;
- done = FALSE;
- live = FALSE;
- min = 0;
- max = GST_CLOCK_TIME_NONE;
-
- /* Take maximum of all latency values */
- it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
- while (!done) {
- GstIteratorResult ires;
- gpointer item;
-
- ires = gst_iterator_next (it, &item);
- switch (ires) {
- case GST_ITERATOR_DONE:
- done = TRUE;
- break;
- case GST_ITERATOR_OK:
- {
- GstPad *pad = GST_PAD_CAST (item);
-
- GstQuery *peerquery;
-
- GstClockTime min_cur, max_cur;
-
- gboolean live_cur;
-
- peerquery = gst_query_new_latency ();
-
- /* Ask peer for latency */
- res &= gst_pad_peer_query (pad, peerquery);
-
- /* take max from all valid return values */
- if (res) {
- gst_query_parse_latency (peerquery, &live_cur, &min_cur, &max_cur);
-
- if (min_cur > min)
- min = min_cur;
-
- if (max_cur != GST_CLOCK_TIME_NONE &&
- ((max != GST_CLOCK_TIME_NONE && max_cur > max) ||
- (max == GST_CLOCK_TIME_NONE)))
- max = max_cur;
-
- live = live || live_cur;
- }
-
- gst_query_unref (peerquery);
- gst_object_unref (pad);
- break;
- }
- case GST_ITERATOR_RESYNC:
- live = FALSE;
- min = 0;
- max = GST_CLOCK_TIME_NONE;
- res = TRUE;
- gst_iterator_resync (it);
- break;
- default:
- res = FALSE;
- done = TRUE;
- break;
- }
- }
- gst_iterator_free (it);
-
- if (res) {
- /* store the results */
- GST_DEBUG_OBJECT (mix, "Calculated total latency: live %s, min %"
- GST_TIME_FORMAT ", max %" GST_TIME_FORMAT,
- (live ? "yes" : "no"), GST_TIME_ARGS (min), GST_TIME_ARGS (max));
- gst_query_set_latency (query, live, min, max);
- }
-
- return res;
-}
-
-static gboolean
-gst_videomixer_query (GstPad * pad, GstQuery * query)
-{
- GstVideoMixer *mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
- gboolean res = FALSE;
-
- switch (GST_QUERY_TYPE (query)) {
- case GST_QUERY_POSITION:
- {
- GstFormat format;
-
- gst_query_parse_position (query, &format, NULL);
-
- switch (format) {
- case GST_FORMAT_TIME:
- /* FIXME, bring to stream time, might be tricky */
- gst_query_set_position (query, format, mix->last_ts);
- res = TRUE;
- break;
- default:
- break;
- }
- break;
- }
- case GST_QUERY_DURATION:
- res = gst_videomixer_query_duration (mix, query);
- break;
- case GST_QUERY_LATENCY:
- res = gst_videomixer_query_latency (mix, query);
- break;
- default:
- /* FIXME, needs a custom query handler because we have multiple
- * sinkpads, send to the master pad until then */
- res = gst_pad_query (GST_PAD_CAST (mix->master), query);
- break;
- }
-
- gst_object_unref (mix);
- return res;
-}
-
-static GstCaps *
-gst_videomixer_getcaps (GstPad * pad)
-{
- GstVideoMixer *mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
- GstCaps *caps;
- GstStructure *structure;
- int numCaps;
-
- if (mix->master) {
- caps =
- gst_caps_copy (gst_pad_get_pad_template_caps (GST_PAD (mix->master)));
- } else {
- caps = gst_caps_copy (gst_pad_get_pad_template_caps (mix->srcpad));
- }
-
- numCaps = gst_caps_get_size (caps) - 1;
- for (; numCaps >= 0; numCaps--) {
- structure = gst_caps_get_structure (caps, numCaps);
- if (mix->out_width != 0) {
- gst_structure_set (structure, "width", G_TYPE_INT, mix->out_width, NULL);
- }
- if (mix->out_height != 0) {
- gst_structure_set (structure, "height", G_TYPE_INT, mix->out_height,
- NULL);
- }
- if (mix->fps_d != 0) {
- gst_structure_set (structure,
- "framerate", GST_TYPE_FRACTION, mix->fps_n, mix->fps_d, NULL);
- }
- }
-
- gst_object_unref (mix);
-
- return caps;
-}
-
-static gboolean
-gst_videomixer_setcaps (GstPad * pad, GstCaps * caps)
-{
- GstVideoMixer *mixer = GST_VIDEO_MIXER (gst_pad_get_parent_element (pad));
- gboolean ret = FALSE;
-
- GST_INFO_OBJECT (mixer, "set src caps: %" GST_PTR_FORMAT, caps);
-
- mixer->blend = NULL;
- mixer->overlay = NULL;
- mixer->fill_checker = NULL;
- mixer->fill_color = NULL;
-
- if (!gst_video_format_parse_caps (caps, &mixer->fmt, NULL, NULL))
- goto done;
-
- switch (mixer->fmt) {
- case GST_VIDEO_FORMAT_AYUV:
- mixer->blend = gst_video_mixer_blend_ayuv;
- mixer->overlay = gst_video_mixer_overlay_ayuv;
- mixer->fill_checker = gst_video_mixer_fill_checker_ayuv;
- mixer->fill_color = gst_video_mixer_fill_color_ayuv;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_ARGB:
- mixer->blend = gst_video_mixer_blend_argb;
- mixer->overlay = gst_video_mixer_overlay_argb;
- mixer->fill_checker = gst_video_mixer_fill_checker_argb;
- mixer->fill_color = gst_video_mixer_fill_color_argb;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_BGRA:
- mixer->blend = gst_video_mixer_blend_bgra;
- mixer->overlay = gst_video_mixer_overlay_bgra;
- mixer->fill_checker = gst_video_mixer_fill_checker_bgra;
- mixer->fill_color = gst_video_mixer_fill_color_bgra;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_ABGR:
- mixer->blend = gst_video_mixer_blend_abgr;
- mixer->overlay = gst_video_mixer_overlay_abgr;
- mixer->fill_checker = gst_video_mixer_fill_checker_abgr;
- mixer->fill_color = gst_video_mixer_fill_color_abgr;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_RGBA:
- mixer->blend = gst_video_mixer_blend_rgba;
- mixer->overlay = gst_video_mixer_overlay_rgba;
- mixer->fill_checker = gst_video_mixer_fill_checker_rgba;
- mixer->fill_color = gst_video_mixer_fill_color_rgba;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_Y444:
- mixer->blend = gst_video_mixer_blend_y444;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_y444;
- mixer->fill_color = gst_video_mixer_fill_color_y444;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_Y42B:
- mixer->blend = gst_video_mixer_blend_y42b;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_y42b;
- mixer->fill_color = gst_video_mixer_fill_color_y42b;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_YUY2:
- mixer->blend = gst_video_mixer_blend_yuy2;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_yuy2;
- mixer->fill_color = gst_video_mixer_fill_color_yuy2;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_UYVY:
- mixer->blend = gst_video_mixer_blend_uyvy;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_uyvy;
- mixer->fill_color = gst_video_mixer_fill_color_uyvy;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_YVYU:
- mixer->blend = gst_video_mixer_blend_yvyu;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_yvyu;
- mixer->fill_color = gst_video_mixer_fill_color_yvyu;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_I420:
- mixer->blend = gst_video_mixer_blend_i420;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_i420;
- mixer->fill_color = gst_video_mixer_fill_color_i420;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_YV12:
- mixer->blend = gst_video_mixer_blend_yv12;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_yv12;
- mixer->fill_color = gst_video_mixer_fill_color_yv12;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_Y41B:
- mixer->blend = gst_video_mixer_blend_y41b;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_y41b;
- mixer->fill_color = gst_video_mixer_fill_color_y41b;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_RGB:
- mixer->blend = gst_video_mixer_blend_rgb;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_rgb;
- mixer->fill_color = gst_video_mixer_fill_color_rgb;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_BGR:
- mixer->blend = gst_video_mixer_blend_bgr;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_bgr;
- mixer->fill_color = gst_video_mixer_fill_color_bgr;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_xRGB:
- mixer->blend = gst_video_mixer_blend_xrgb;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_xrgb;
- mixer->fill_color = gst_video_mixer_fill_color_xrgb;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_xBGR:
- mixer->blend = gst_video_mixer_blend_xbgr;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_xbgr;
- mixer->fill_color = gst_video_mixer_fill_color_xbgr;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_RGBx:
- mixer->blend = gst_video_mixer_blend_rgbx;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_rgbx;
- mixer->fill_color = gst_video_mixer_fill_color_rgbx;
- ret = TRUE;
- break;
- case GST_VIDEO_FORMAT_BGRx:
- mixer->blend = gst_video_mixer_blend_bgrx;
- mixer->overlay = mixer->blend;
- mixer->fill_checker = gst_video_mixer_fill_checker_bgrx;
- mixer->fill_color = gst_video_mixer_fill_color_bgrx;
- ret = TRUE;
- break;
- default:
- break;
- }
-
-done:
- gst_object_unref (mixer);
-
- return ret;
-}
-
-static GstPad *
-gst_videomixer_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * req_name)
-{
- GstVideoMixer *mix = NULL;
- GstVideoMixerPad *mixpad = NULL;
- GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
-
- g_return_val_if_fail (templ != NULL, NULL);
-
- if (G_UNLIKELY (templ->direction != GST_PAD_SINK)) {
- g_warning ("videomixer: request pad that is not a SINK pad");
- return NULL;
- }
-
- g_return_val_if_fail (GST_IS_VIDEO_MIXER (element), NULL);
-
- mix = GST_VIDEO_MIXER (element);
-
- if (templ == gst_element_class_get_pad_template (klass, "sink_%d")) {
- gint serial = 0;
- gchar *name = NULL;
- GstVideoMixerCollect *mixcol = NULL;
-
- GST_VIDEO_MIXER_STATE_LOCK (mix);
- if (req_name == NULL || strlen (req_name) < 6
- || !g_str_has_prefix (req_name, "sink_")) {
- /* no name given when requesting the pad, use next available int */
- serial = mix->next_sinkpad++;
- } else {
- /* parse serial number from requested padname */
- serial = atoi (&req_name[5]);
- if (serial >= mix->next_sinkpad)
- mix->next_sinkpad = serial + 1;
- }
- /* create new pad with the name */
- name = g_strdup_printf ("sink_%d", serial);
- mixpad = g_object_new (GST_TYPE_VIDEO_MIXER_PAD, "name", name, "direction",
- templ->direction, "template", templ, NULL);
- g_free (name);
-
- mixpad->zorder = mix->numpads;
- mixpad->xpos = DEFAULT_PAD_XPOS;
- mixpad->ypos = DEFAULT_PAD_YPOS;
- mixpad->alpha = DEFAULT_PAD_ALPHA;
-
- mixcol = (GstVideoMixerCollect *)
- gst_collect_pads_add_pad (mix->collect, GST_PAD (mixpad),
- sizeof (GstVideoMixerCollect));
-
- /* FIXME: hacked way to override/extend the event function of
- * GstCollectPads; because it sets its own event function giving the
- * element no access to events */
- mix->collect_event =
- (GstPadEventFunction) GST_PAD_EVENTFUNC (GST_PAD (mixpad));
- gst_pad_set_event_function (GST_PAD (mixpad),
- GST_DEBUG_FUNCPTR (gst_videomixer_sink_event));
-
- /* Keep track of each other */
- mixcol->mixpad = mixpad;
- mixpad->mixcol = mixcol;
-
- /* Keep an internal list of mixpads for zordering */
- mix->sinkpads = g_slist_append (mix->sinkpads, mixpad);
- mix->numpads++;
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
- } else {
- g_warning ("videomixer: this is not our template!");
- return NULL;
- }
-
- /* add the pad to the element */
- gst_element_add_pad (element, GST_PAD (mixpad));
- gst_child_proxy_child_added (GST_OBJECT (mix), GST_OBJECT (mixpad));
-
- return GST_PAD (mixpad);
-}
-
-static void
-gst_videomixer_release_pad (GstElement * element, GstPad * pad)
-{
- GstVideoMixer *mix = NULL;
- GstVideoMixerPad *mixpad;
-
- mix = GST_VIDEO_MIXER (element);
- GST_VIDEO_MIXER_STATE_LOCK (mix);
- if (G_UNLIKELY (g_slist_find (mix->sinkpads, pad) == NULL)) {
- g_warning ("Unknown pad %s", GST_PAD_NAME (pad));
- goto error;
- }
-
- mixpad = GST_VIDEO_MIXER_PAD (pad);
-
- mix->sinkpads = g_slist_remove (mix->sinkpads, pad);
- gst_videomixer_collect_free (mixpad->mixcol);
- gst_collect_pads_remove_pad (mix->collect, pad);
- gst_child_proxy_child_removed (GST_OBJECT (mix), GST_OBJECT (mixpad));
- /* determine possibly new geometry and master */
- gst_videomixer_set_master_geometry (mix);
- mix->numpads--;
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
-
- gst_element_remove_pad (element, pad);
- return;
-error:
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
-}
-
-static int
-pad_zorder_compare (const GstVideoMixerPad * pad1,
- const GstVideoMixerPad * pad2)
-{
- return pad1->zorder - pad2->zorder;
-}
-
-static void
-gst_videomixer_sort_pads (GstVideoMixer * mix)
-{
- mix->sinkpads = g_slist_sort (mix->sinkpads,
- (GCompareFunc) pad_zorder_compare);
-}
-
-/* try to get a buffer on all pads. As long as the queued value is
- * negative, we skip buffers */
-static gboolean
-gst_videomixer_fill_queues (GstVideoMixer * mix)
-{
- GSList *walk = NULL;
- gboolean eos = TRUE;
-
- g_return_val_if_fail (GST_IS_VIDEO_MIXER (mix), FALSE);
-
- /* try to make sure we have a buffer from each usable pad first */
- walk = mix->collect->data;
- while (walk) {
- GstCollectData *data = (GstCollectData *) walk->data;
- GstVideoMixerCollect *mixcol = (GstVideoMixerCollect *) data;
- GstVideoMixerPad *mixpad = mixcol->mixpad;
-
- walk = g_slist_next (walk);
-
- if (mixcol->buffer == NULL) {
- GstBuffer *buf = NULL;
-
- GST_LOG_OBJECT (mix, "we need a new buffer");
-
- buf = gst_collect_pads_peek (mix->collect, data);
-
- if (buf) {
- guint64 duration;
-
- mixcol->buffer = buf;
- duration = GST_BUFFER_DURATION (mixcol->buffer);
-
- GST_LOG_OBJECT (mix, "we have a buffer with duration %" GST_TIME_FORMAT
- ", queued %" GST_TIME_FORMAT, GST_TIME_ARGS (duration),
- GST_TIME_ARGS (mixpad->queued));
-
- /* no duration on the buffer, use the framerate */
- if (!GST_CLOCK_TIME_IS_VALID (duration)) {
- if (mixpad->fps_n == 0) {
- duration = GST_CLOCK_TIME_NONE;
- } else {
- duration =
- gst_util_uint64_scale_int (GST_SECOND, mixpad->fps_d,
- mixpad->fps_n);
- }
- }
- if (GST_CLOCK_TIME_IS_VALID (duration))
- mixpad->queued += duration;
- else if (!mixpad->queued)
- mixpad->queued = GST_CLOCK_TIME_NONE;
-
- GST_LOG_OBJECT (mix, "now queued: %" GST_TIME_FORMAT,
- GST_TIME_ARGS (mixpad->queued));
- } else {
- GST_LOG_OBJECT (mix, "pop returned a NULL buffer");
- }
- }
- if (mix->sendseg && (mixpad == mix->master)) {
- GstEvent *event;
- gint64 stop, start;
- GstSegment *segment = &data->segment;
-
- /* FIXME, use rate/applied_rate as set on all sinkpads.
- * - currently we just set rate as received from last seek-event
- * We could potentially figure out the duration as well using
- * the current segment positions and the stated stop positions.
- * Also we just start from stream time 0 which is rather
- * weird. For non-synchronized mixing, the time should be
- * the min of the stream times of all received segments,
- * rationale being that the duration is at least going to
- * be as long as the earliest stream we start mixing. This
- * would also be correct for synchronized mixing but then
- * the later streams would be delayed until the stream times
- * match.
- */
- GST_INFO_OBJECT (mix, "_sending play segment");
-
- start = segment->accum;
-
- /* get the duration of the segment if we can and add it to the accumulated
- * time on the segment. */
- if (segment->stop != -1 && segment->start != -1)
- stop = start + (segment->stop - segment->start);
- else
- stop = -1;
-
- gst_segment_set_newsegment (&mix->segment, FALSE, segment->rate,
- segment->format, start, stop, start + mix->segment_position);
- event =
- gst_event_new_new_segment_full (FALSE, segment->rate, 1.0,
- segment->format, start, stop, start + mix->segment_position);
- gst_pad_push_event (mix->srcpad, event);
- mix->sendseg = FALSE;
- }
-
- if (mixcol->buffer != NULL && GST_CLOCK_TIME_IS_VALID (mixpad->queued)) {
- /* got a buffer somewhere so we're not eos */
- eos = FALSE;
- }
- }
-
- return eos;
-}
-
-/* blend all buffers present on the pads */
-static void
-gst_videomixer_blend_buffers (GstVideoMixer * mix, GstBuffer * outbuf)
-{
- GSList *walk;
- BlendFunction blend;
- if (mix->background == VIDEO_MIXER_BACKGROUND_TRANSPARENT) {
- blend = mix->overlay;
- } else {
- blend = mix->blend;
- }
-
-
- walk = mix->sinkpads;
- while (walk) { /* We walk with this list because it's ordered */
- GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (walk->data);
- GstVideoMixerCollect *mixcol = pad->mixcol;
-
- walk = g_slist_next (walk);
-
- if (mixcol->buffer != NULL) {
- GstClockTime timestamp;
- gint64 stream_time;
- GstSegment *seg;
-
- seg = &mixcol->collect.segment;
-
- timestamp = GST_BUFFER_TIMESTAMP (mixcol->buffer);
-
- stream_time =
- gst_segment_to_stream_time (seg, GST_FORMAT_TIME, timestamp);
-
- /* sync object properties on stream time */
- if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (pad), stream_time);
-
- blend (GST_BUFFER_DATA (mixcol->buffer),
- pad->xpos, pad->ypos, pad->in_width, pad->in_height, pad->alpha,
- GST_BUFFER_DATA (outbuf), mix->out_width, mix->out_height);
- }
- }
-}
-
-/* remove buffers from the queue that were expired in the
- * interval of the master, we also prepare the queued value
- * in the pad so that we can skip and fill buffers later on */
-static void
-gst_videomixer_update_queues (GstVideoMixer * mix)
-{
- GSList *walk;
- gint64 interval;
-
- interval = mix->master->queued;
- if (interval <= 0) {
- if (mix->fps_n == 0) {
- interval = G_MAXINT64;
- } else {
- interval = gst_util_uint64_scale_int (GST_SECOND, mix->fps_d, mix->fps_n);
- }
- GST_LOG_OBJECT (mix, "set interval to %" G_GINT64_FORMAT " nanoseconds",
- interval);
- }
-
- walk = mix->sinkpads;
- while (walk) {
- GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (walk->data);
- GstVideoMixerCollect *mixcol = pad->mixcol;
-
- walk = g_slist_next (walk);
-
- if (mixcol->buffer != NULL) {
- pad->queued -= interval;
- GST_LOG_OBJECT (pad, "queued now %" G_GINT64_FORMAT, pad->queued);
- if (pad->queued <= 0) {
- GstBuffer *buffer =
- gst_collect_pads_pop (mix->collect, &mixcol->collect);
-
- GST_LOG_OBJECT (pad, "unreffing buffer");
- if (buffer)
- gst_buffer_unref (buffer);
- else
- GST_WARNING_OBJECT (pad,
- "Buffer was removed by GstCollectPads in the meantime");
-
- gst_buffer_unref (mixcol->buffer);
- mixcol->buffer = NULL;
- }
- }
- }
-}
-
-static GstFlowReturn
-gst_videomixer_collected (GstCollectPads * pads, GstVideoMixer * mix)
-{
- GstFlowReturn ret = GST_FLOW_OK;
- GstBuffer *outbuf = NULL;
- size_t outsize = 0;
- gboolean eos = FALSE;
- GstClockTime timestamp = GST_CLOCK_TIME_NONE;
- GstClockTime duration = GST_CLOCK_TIME_NONE;
-
- g_return_val_if_fail (GST_IS_VIDEO_MIXER (mix), GST_FLOW_ERROR);
-
- /* This must be set, otherwise we have no caps */
- if (G_UNLIKELY (mix->in_width == 0))
- return GST_FLOW_NOT_NEGOTIATED;
-
- if (g_atomic_int_compare_and_exchange (&mix->flush_stop_pending, TRUE, FALSE)) {
- GST_DEBUG_OBJECT (mix, "pending flush stop");
- gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop ());
- }
-
- GST_LOG_OBJECT (mix, "all pads are collected");
- GST_VIDEO_MIXER_STATE_LOCK (mix);
-
- eos = gst_videomixer_fill_queues (mix);
-
- if (eos) {
- /* Push EOS downstream */
- GST_LOG_OBJECT (mix, "all our sinkpads are EOS, pushing downstream");
- gst_pad_push_event (mix->srcpad, gst_event_new_eos ());
- ret = GST_FLOW_WRONG_STATE;
- goto error;
- }
-
- /* If geometry has changed we need to set new caps on the buffer */
- if (mix->in_width != mix->out_width || mix->in_height != mix->out_height
- || mix->setcaps) {
- GstCaps *newcaps = NULL;
-
- newcaps = gst_caps_make_writable
- (gst_pad_get_negotiated_caps (GST_PAD (mix->master)));
- gst_caps_set_simple (newcaps,
- "width", G_TYPE_INT, mix->in_width,
- "height", G_TYPE_INT, mix->in_height,
- "pixel-aspect-ratio", GST_TYPE_FRACTION, mix->par_n, mix->par_d, NULL);
-
- mix->out_width = mix->in_width;
- mix->out_height = mix->in_height;
- mix->setcaps = FALSE;
-
- /* Calculating out buffer size from input size */
- gst_pad_set_caps (mix->srcpad, newcaps);
- gst_caps_unref (newcaps);
- }
-
- /* Get timestamp & duration */
- if (mix->master->mixcol->buffer != NULL) {
- GstClockTime in_ts;
- GstSegment *seg;
- GstVideoMixerCollect *mixcol = mix->master->mixcol;
-
- seg = &mixcol->collect.segment;
- in_ts = GST_BUFFER_TIMESTAMP (mixcol->buffer);
-
- timestamp = gst_segment_to_running_time (seg, GST_FORMAT_TIME, in_ts);
- duration = GST_BUFFER_DURATION (mixcol->buffer);
-
- mix->last_ts = timestamp;
- mix->last_duration = duration;
- } else {
- timestamp = mix->last_ts;
- duration = mix->last_duration;
- }
-
- if (GST_CLOCK_TIME_IS_VALID (duration))
- mix->last_ts += duration;
-
- if (!gst_videomixer_do_qos (mix, timestamp)) {
- gst_videomixer_update_queues (mix);
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
- ret = GST_FLOW_OK;
- goto beach;
- }
-
- /* allocate an output buffer */
- outsize =
- gst_video_format_get_size (mix->fmt, mix->out_width, mix->out_height);
- ret =
- gst_pad_alloc_buffer_and_set_caps (mix->srcpad, GST_BUFFER_OFFSET_NONE,
- outsize, GST_PAD_CAPS (mix->srcpad), &outbuf);
-
- /* This must be set at this point, otherwise we have no src caps */
- g_assert (mix->blend != NULL);
-
- if (ret != GST_FLOW_OK) {
- goto error;
- }
-
- GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
- GST_BUFFER_DURATION (outbuf) = duration;
-
- switch (mix->background) {
- case VIDEO_MIXER_BACKGROUND_CHECKER:
- mix->fill_checker (GST_BUFFER_DATA (outbuf), mix->out_width,
- mix->out_height);
- break;
- case VIDEO_MIXER_BACKGROUND_BLACK:
- mix->fill_color (GST_BUFFER_DATA (outbuf), mix->out_width,
- mix->out_height, 16, 128, 128);
- break;
- case VIDEO_MIXER_BACKGROUND_WHITE:
- mix->fill_color (GST_BUFFER_DATA (outbuf), mix->out_width,
- mix->out_height, 240, 128, 128);
- break;
- case VIDEO_MIXER_BACKGROUND_TRANSPARENT:
- orc_memset (GST_BUFFER_DATA (outbuf), 0,
- gst_video_format_get_row_stride (mix->fmt, 0,
- mix->out_width) * mix->out_height);
- break;
- }
-
- gst_videomixer_blend_buffers (mix, outbuf);
-
- gst_videomixer_update_queues (mix);
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
-
- ret = gst_pad_push (mix->srcpad, outbuf);
-
-beach:
- return ret;
-
- /* ERRORS */
-error:
- {
- if (outbuf)
- gst_buffer_unref (outbuf);
-
- GST_VIDEO_MIXER_STATE_UNLOCK (mix);
- goto beach;
- }
-}
-
-static gboolean
-forward_event_func (GstPad * pad, GValue * ret, GstEvent * event)
-{
- gst_event_ref (event);
- GST_LOG_OBJECT (pad, "About to send event %s", GST_EVENT_TYPE_NAME (event));
- if (!gst_pad_push_event (pad, event)) {
- g_value_set_boolean (ret, FALSE);
- GST_WARNING_OBJECT (pad, "Sending event %p (%s) failed.",
- event, GST_EVENT_TYPE_NAME (event));
- } else {
- GST_LOG_OBJECT (pad, "Sent event %p (%s).",
- event, GST_EVENT_TYPE_NAME (event));
- }
- gst_object_unref (pad);
- return TRUE;
-}
-
-/* forwards the event to all sinkpads, takes ownership of the
- * event
- *
- * Returns: TRUE if the event could be forwarded on all
- * sinkpads.
- */
-static gboolean
-forward_event (GstVideoMixer * mix, GstEvent * event)
-{
- GstIterator *it;
- GValue vret = { 0 };
-
- GST_LOG_OBJECT (mix, "Forwarding event %p (%s)", event,
- GST_EVENT_TYPE_NAME (event));
-
- g_value_init (&vret, G_TYPE_BOOLEAN);
- g_value_set_boolean (&vret, TRUE);
- it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
- gst_iterator_fold (it, (GstIteratorFoldFunction) forward_event_func, &vret,
- event);
- gst_iterator_free (it);
- gst_event_unref (event);
-
- return g_value_get_boolean (&vret);
-}
-
-static gboolean
-gst_videomixer_src_event (GstPad * pad, GstEvent * event)
-{
- GstVideoMixer *mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
- gboolean result;
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_QOS:{
- GstClockTimeDiff diff;
- GstClockTime timestamp;
- gdouble proportion;
-
- gst_event_parse_qos (event, &proportion, &diff, ×tamp);
-
- gst_videomixer_update_qos (mix, proportion, diff, timestamp);
- gst_event_unref (event);
-
- /* TODO: The QoS event should be transformed and send upstream */
- result = TRUE;
- break;
- }
- case GST_EVENT_SEEK:
- {
- GstSeekFlags flags;
- GstSeekType curtype;
- gint64 cur;
-
- /* parse the seek parameters */
- gst_event_parse_seek (event, NULL, NULL, &flags, &curtype,
- &cur, NULL, NULL);
-
- /* check if we are flushing */
- if (flags & GST_SEEK_FLAG_FLUSH) {
- /* make sure we accept nothing anymore and return WRONG_STATE */
- gst_collect_pads_set_flushing (mix->collect, TRUE);
-
- /* flushing seek, start flush downstream, the flush will be done
- * when all pads received a FLUSH_STOP. */
- gst_pad_push_event (mix->srcpad, gst_event_new_flush_start ());
- }
-
- /* now wait for the collected to be finished and mark a new
- * segment */
- GST_OBJECT_LOCK (mix->collect);
- if (curtype == GST_SEEK_TYPE_SET)
- mix->segment_position = cur;
- else
- mix->segment_position = 0;
- mix->sendseg = TRUE;
-
- if (flags & GST_SEEK_FLAG_FLUSH) {
- gst_collect_pads_set_flushing (mix->collect, FALSE);
-
- /* we can't send FLUSH_STOP here since upstream could start pushing data
- * after we unlock mix->collect.
- * We set flush_stop_pending to TRUE instead and send FLUSH_STOP after
- * forwarding the seek upstream or from gst_videomixer_collected,
- * whichever happens first.
- */
- mix->flush_stop_pending = TRUE;
- }
-
- GST_OBJECT_UNLOCK (mix->collect);
- gst_videomixer_reset_qos (mix);
-
- result = forward_event (mix, event);
-
- if (g_atomic_int_compare_and_exchange (&mix->flush_stop_pending,
- TRUE, FALSE)) {
- GST_DEBUG_OBJECT (mix, "pending flush stop");
- gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop ());
- }
-
- break;
- }
- case GST_EVENT_NAVIGATION:
- /* navigation is rather pointless. */
- result = FALSE;
- break;
- default:
- /* just forward the rest for now */
- result = forward_event (mix, event);
- break;
- }
- gst_object_unref (mix);
-
- return result;
-}
-
-static gboolean
-gst_videomixer_sink_event (GstPad * pad, GstEvent * event)
-{
- GstVideoMixerPad *vpad = GST_VIDEO_MIXER_PAD (pad);
- GstVideoMixer *videomixer = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
- gboolean ret;
-
- GST_DEBUG_OBJECT (pad, "Got %s event on pad %s:%s",
- GST_EVENT_TYPE_NAME (event), GST_DEBUG_PAD_NAME (pad));
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_FLUSH_STOP:
- /* mark a pending new segment. This event is synchronized
- * with the streaming thread so we can safely update the
- * variable without races. It's somewhat weird because we
- * assume the collectpads forwarded the FLUSH_STOP past us
- * and downstream (using our source pad, the bastard!).
- */
- videomixer->sendseg = TRUE;
- videomixer->flush_stop_pending = FALSE;
- gst_videomixer_reset_qos (videomixer);
-
- /* Reset pad state after FLUSH_STOP */
- if (vpad->mixcol->buffer)
- gst_buffer_unref (vpad->mixcol->buffer);
- vpad->mixcol->buffer = NULL;
- vpad->queued = 0;
- break;
- case GST_EVENT_NEWSEGMENT:
- if (!videomixer->master || vpad == videomixer->master) {
- videomixer->sendseg = TRUE;
- gst_videomixer_reset_qos (videomixer);
- }
- break;
- default:
- break;
- }
-
- /* now GstCollectPads can take care of the rest, e.g. EOS */
- ret = videomixer->collect_event (pad, event);
-
- gst_object_unref (videomixer);
- return ret;
-}
-
-
-static void
-gst_videomixer_get_property (GObject * object,
- guint prop_id, GValue * value, GParamSpec * pspec)
-{
- GstVideoMixer *mix = GST_VIDEO_MIXER (object);
-
- switch (prop_id) {
- case PROP_BACKGROUND:
- g_value_set_enum (value, mix->background);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static void
-gst_videomixer_set_property (GObject * object,
- guint prop_id, const GValue * value, GParamSpec * pspec)
-{
- GstVideoMixer *mix = GST_VIDEO_MIXER (object);
-
- switch (prop_id) {
- case PROP_BACKGROUND:
- mix->background = g_value_get_enum (value);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static GstStateChangeReturn
-gst_videomixer_change_state (GstElement * element, GstStateChange transition)
-{
- GstVideoMixer *mix;
- GstStateChangeReturn ret;
-
- g_return_val_if_fail (GST_IS_VIDEO_MIXER (element), GST_STATE_CHANGE_FAILURE);
-
- mix = GST_VIDEO_MIXER (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- GST_LOG_OBJECT (mix, "starting collectpads");
- gst_collect_pads_start (mix->collect);
- break;
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- GST_LOG_OBJECT (mix, "stopping collectpads");
- gst_collect_pads_stop (mix->collect);
- break;
- default:
- break;
- }
-
- ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
-
- switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_videomixer_reset (mix);
- break;
- default:
- break;
- }
-
- return ret;
-}
-
-static gboolean
-plugin_init (GstPlugin * plugin)
-{
- GST_DEBUG_CATEGORY_INIT (gst_videomixer_debug, "videomixer", 0,
- "video mixer");
-
- gst_video_mixer_init_blend ();
-
- return gst_element_register (plugin, "videomixer", GST_RANK_PRIMARY,
- GST_TYPE_VIDEO_MIXER) && gst_videomixer2_register (plugin);
-}
-
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
- GST_VERSION_MINOR,
- "videomixer",
- "Video mixer", plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME,
- GST_PACKAGE_ORIGIN)
+++ /dev/null
-/* Generic video mixer plugin
- * Copyright (C) 2008 Wim Taymans <wim@fluendo.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_VIDEO_MIXER_H__
-#define __GST_VIDEO_MIXER_H__
-
-#include <gst/gst.h>
-#include <gst/video/video.h>
-#include "videomixerpad.h"
-#include "blend.h"
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_VIDEO_MIXER (gst_videomixer_get_type())
-#define GST_VIDEO_MIXER(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_MIXER, GstVideoMixer))
-#define GST_VIDEO_MIXER_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_MIXER, GstVideoMixerClass))
-#define GST_IS_VIDEO_MIXER(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_MIXER))
-#define GST_IS_VIDEO_MIXER_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_MIXER))
-
-typedef struct _GstVideoMixer GstVideoMixer;
-typedef struct _GstVideoMixerClass GstVideoMixerClass;
-
-/**
- * GstVideoMixerBackground:
- * @VIDEO_MIXER_BACKGROUND_CHECKER: checker pattern background
- * @VIDEO_MIXER_BACKGROUND_BLACK: solid color black background
- * @VIDEO_MIXER_BACKGROUND_WHITE: solid color white background
- * @VIDEO_MIXER_BACKGROUND_TRANSPARENT: background is left transparent and layers are composited using "A OVER B" composition rules. This is only applicable to AYUV and ARGB (and variants) as it preserves the alpha channel and allows for further mixing.
- *
- * The different backgrounds videomixer can blend over.
- */
-typedef enum
-{
- VIDEO_MIXER_BACKGROUND_CHECKER,
- VIDEO_MIXER_BACKGROUND_BLACK,
- VIDEO_MIXER_BACKGROUND_WHITE,
- VIDEO_MIXER_BACKGROUND_TRANSPARENT,
-}
-GstVideoMixerBackground;
-
-/**
- * GstVideoMixer:
- *
- * The opaque #GstVideoMixer structure.
- */
-struct _GstVideoMixer
-{
- GstElement element;
-
- /* pad */
- GstPad *srcpad;
-
- /* Lock to prevent the state to change while blending */
- GMutex *state_lock;
- /* Sink pads using Collect Pads from core's base library */
- GstCollectPads *collect;
- /* sinkpads, a GSList of GstVideoMixerPads */
- GSList *sinkpads;
-
- gint numpads;
-
- GstClockTime last_ts;
- GstClockTime last_duration;
-
- /* the master pad */
- GstVideoMixerPad *master;
-
- GstVideoFormat fmt;
-
- gint in_width, in_height;
- gint out_width, out_height;
- gboolean setcaps;
- gboolean sendseg;
-
- GstVideoMixerBackground background;
-
- gint fps_n;
- gint fps_d;
-
- gint par_n;
- gint par_d;
-
- /* Next available sinkpad index */
- gint next_sinkpad;
-
- /* sink event handling */
- GstPadEventFunction collect_event;
- guint64 segment_position;
-
- /* Current downstream segment */
- GstSegment segment;
-
- /* QoS stuff */
- gdouble proportion;
- GstClockTime earliest_time;
-
- BlendFunction blend, overlay;
- FillCheckerFunction fill_checker;
- FillColorFunction fill_color;
-
- gboolean flush_stop_pending;
-};
-
-struct _GstVideoMixerClass
-{
- GstElementClass parent_class;
-};
-
-GType gst_video_mixer_get_type (void);
-
-G_END_DECLS
-#endif /* __GST_VIDEO_MIXER_H__ */
*/
/**
- * SECTION:element-videomixer2
+ * SECTION:element-videomixer
*
* Videomixer2 can accept AYUV, ARGB and BGRA video streams. For each of the requested
* sink pads it will compare the incoming geometry and framerate to define the
* Individual parameters for each input stream can be configured on the
* #GstVideoMixer2Pad.
*
- * At this stage, videomixer2 is considered UNSTABLE. The API provided in the
- * properties may yet change in the near future. When videomixer2 is stable,
- * it will replace #videomixer
- *
* <refsect2>
* <title>Sample pipelines</title>
* |[
- * gst-launch-0.10 \
+ * gst-launch-0.11 \
* videotestsrc pattern=1 ! \
- * video/x-raw-yuv,format=\(fourcc\)AYUV,framerate=\(fraction\)10/1,width=100,height=100 ! \
+ * video/x-raw,format=AYUV,framerate=\(fraction\)10/1,width=100,height=100 ! \
* videobox border-alpha=0 top=-70 bottom=-70 right=-220 ! \
- * videomixer2 name=mix sink_0::alpha=0.7 sink_1::alpha=0.5 ! \
- * ffmpegcolorspace ! xvimagesink \
+ * videomixer name=mix sink_0::alpha=0.7 sink_1::alpha=0.5 ! \
+ * videoconvert ! xvimagesink \
* videotestsrc ! \
- * video/x-raw-yuv,format=\(fourcc\)AYUV,framerate=\(fraction\)5/1,width=320,height=240 ! mix.
+ * video/x-raw,format=AYUV,framerate=\(fraction\)5/1,width=320,height=240 ! mix.
* ]| A pipeline to demonstrate videomixer used together with videobox.
* This should show a 320x240 pixels video test source with some transparency
* showing the background checker pattern. Another video test source with just
* video test source behind and the checker pattern under it. Note that the
* framerate of the output video is 10 frames per second.
* |[
- * gst-launch videotestsrc pattern=1 ! \
- * video/x-raw-rgb, framerate=\(fraction\)10/1, width=100, height=100 ! \
- * videomixer2 name=mix ! ffmpegcolorspace ! ximagesink \
+ * gst-launch-0.11 videotestsrc pattern=1 ! \
+ * video/x-raw, framerate=\(fraction\)10/1, width=100, height=100 ! \
+ * videomixer name=mix ! videoconvert ! ximagesink \
* videotestsrc ! \
- * video/x-raw-rgb, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
+ * video/x-raw, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
* ]| A pipeline to demostrate bgra mixing. (This does not demonstrate alpha blending).
* |[
- * gst-launch videotestsrc pattern=1 ! \
- * video/x-raw-yuv,format =\(fourcc\)I420, framerate=\(fraction\)10/1, width=100, height=100 ! \
- * videomixer2 name=mix ! ffmpegcolorspace ! ximagesink \
+ * gst-launch-0.11 videotestsrc pattern=1 ! \
+ * video/x-raw,format =I420, framerate=\(fraction\)10/1, width=100, height=100 ! \
+ * videomixer name=mix ! videoconvert ! ximagesink \
* videotestsrc ! \
- * video/x-raw-yuv,format=\(fourcc\)I420, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
+ * video/x-raw,format=I420, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
* ]| A pipeline to test I420
* |[
- * gst-launch videomixer2 name=mixer sink_1::alpha=0.5 sink_1::xpos=50 sink_1::ypos=50 ! \
- * ffmpegcolorspace ! ximagesink \
+ * gst-launch-0.11 videomixer name=mixer sink_1::alpha=0.5 sink_1::xpos=50 sink_1::ypos=50 ! \
+ * videoconvert ! ximagesink \
* videotestsrc pattern=snow timestamp-offset=3000000000 ! \
- * "video/x-raw-yuv,format=(fourcc)AYUV,width=640,height=480,framerate=(fraction)30/1" ! \
+ * "video/x-raw,format=AYUV,width=640,height=480,framerate=(fraction)30/1" ! \
* timeoverlay ! queue2 ! mixer. \
* videotestsrc pattern=smpte ! \
- * "video/x-raw-yuv,format=(fourcc)AYUV,width=800,height=600,framerate=(fraction)10/1" ! \
+ * "video/x-raw,format=AYUV,width=800,height=600,framerate=(fraction)10/1" ! \
* timeoverlay ! queue2 ! mixer.
* ]| A pipeline to demonstrate synchronized mixing (the second stream starts after 3 seconds)
* </refsect2>
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <string.h>
#include "videomixer2.h"
#include "videomixer2pad.h"
-#include <gst/controller/gstcontroller.h>
-
-#include "gst/glib-compat-private.h"
-
#ifdef DISABLE_ORC
#define orc_memset memset
#else
#define GST_CAT_DEFAULT gst_videomixer2_debug
#define GST_VIDEO_MIXER2_GET_LOCK(mix) \
- (GST_VIDEO_MIXER2(mix)->lock)
+ (&GST_VIDEO_MIXER2(mix)->lock)
#define GST_VIDEO_MIXER2_LOCK(mix) \
(g_mutex_lock(GST_VIDEO_MIXER2_GET_LOCK (mix)))
#define GST_VIDEO_MIXER2_UNLOCK(mix) \
(g_mutex_unlock(GST_VIDEO_MIXER2_GET_LOCK (mix)))
+#define FORMATS " { AYUV, BGRA, ARGB, RGBA, ABGR, Y444, Y42B, YUY2, UYVY, "\
+ " YVYU, I420, YV12, Y41B, RGB, BGR, xRGB, xBGR, RGBx, BGRx } "
+
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_ABGR ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";" GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";" GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("Y41B") ";" GST_VIDEO_CAPS_RGB ";"
- GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
- GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
);
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%d",
GST_PAD_SINK,
GST_PAD_REQUEST,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_ABGR ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";" GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";" GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("Y41B") ";" GST_VIDEO_CAPS_RGB ";"
- GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
- GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
);
static void gst_videomixer2_child_proxy_init (gpointer g_iface,
static void gst_videomixer2_release_pad (GstElement * element, GstPad * pad);
static void gst_videomixer2_reset_qos (GstVideoMixer2 * mix);
-static void
-_do_init (GType object_type)
-{
- static const GInterfaceInfo child_proxy_info = {
- (GInterfaceInitFunc) gst_videomixer2_child_proxy_init,
- NULL,
- NULL
- };
-
- g_type_add_interface_static (object_type, GST_TYPE_CHILD_PROXY,
- &child_proxy_info);
-}
-
struct _GstVideoMixer2Collect
{
GstCollectData2 collect; /* we extend the CollectData */
gst_buffer_replace (&cdata->buffer, NULL);
}
+static gboolean gst_videomixer2_src_setcaps (GstPad * pad, GstVideoMixer2 * mix,
+ GstCaps * caps);
+
static gboolean
gst_videomixer2_update_src_caps (GstVideoMixer2 * mix)
{
for (l = mix->sinkpads; l; l = l->next) {
GstVideoMixer2Pad *mpad = l->data;
gint this_width, this_height;
+ gint fps_n, fps_d;
+ gint width, height;
- if (mpad->fps_n == 0 || mpad->fps_d == 0 ||
- mpad->width == 0 || mpad->height == 0)
+ fps_n = GST_VIDEO_INFO_FPS_N (&mpad->info);
+ fps_d = GST_VIDEO_INFO_FPS_D (&mpad->info);
+ width = GST_VIDEO_INFO_WIDTH (&mpad->info);
+ height = GST_VIDEO_INFO_HEIGHT (&mpad->info);
+
+ if (fps_n == 0 || fps_d == 0 || width == 0 || height == 0)
continue;
- this_width = mpad->width + MAX (mpad->xpos, 0);
- this_height = mpad->height + MAX (mpad->ypos, 0);
+ this_width = width + MAX (mpad->xpos, 0);
+ this_height = height + MAX (mpad->ypos, 0);
if (best_width < this_width)
best_width = this_width;
if (best_height < this_height)
best_height = this_height;
- if (mpad->fps_d == 0)
+ if (fps_d == 0)
cur_fps = 0.0;
else
- gst_util_fraction_to_double (mpad->fps_n, mpad->fps_d, &cur_fps);
+ gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);
+
if (best_fps < cur_fps) {
best_fps = cur_fps;
- best_fps_n = mpad->fps_n;
- best_fps_d = mpad->fps_d;
+ best_fps_n = fps_n;
+ best_fps_d = fps_d;
}
}
if (best_width > 0 && best_height > 0 && best_fps > 0) {
GstCaps *caps, *peercaps;
GstStructure *s;
+ GstVideoInfo info;
- if (mix->fps_n != best_fps_n || mix->fps_d != best_fps_d) {
- if (mix->segment.last_stop != -1) {
- mix->ts_offset = mix->segment.last_stop - mix->segment.start;
+ if (GST_VIDEO_INFO_FPS_N (&mix->info) != best_fps_n ||
+ GST_VIDEO_INFO_FPS_D (&mix->info) != best_fps_d) {
+ if (mix->segment.position != -1) {
+ mix->ts_offset = mix->segment.position - mix->segment.start;
mix->nframes = 0;
}
}
+ gst_video_info_set_format (&info, GST_VIDEO_INFO_FORMAT (&mix->info),
+ best_width, best_height);
+ info.fps_n = best_fps_n;
+ info.fps_d = best_fps_d;
+ info.par_n = GST_VIDEO_INFO_PAR_N (&mix->info);
+ info.par_d = GST_VIDEO_INFO_PAR_D (&mix->info);
- caps = gst_video_format_new_caps (mix->format,
- best_width, best_height, best_fps_n, best_fps_d,
- mix->par_n, mix->par_d);
+ caps = gst_video_info_to_caps (&info);
- peercaps = gst_pad_peer_get_caps (mix->srcpad);
+ peercaps = gst_pad_peer_query_caps (mix->srcpad, NULL);
if (peercaps) {
GstCaps *tmp;
gst_caps_unref (peercaps);
caps = tmp;
if (gst_caps_is_empty (caps)) {
+ GST_DEBUG_OBJECT (mix, "empty caps");
ret = FALSE;
GST_VIDEO_MIXER2_UNLOCK (mix);
goto done;
gst_structure_fixate_field_nearest_fraction (s, "framerate", best_fps_n,
best_fps_d);
- gst_structure_get_int (s, "width", &best_width);
- gst_structure_get_int (s, "height", &best_height);
- gst_structure_get_fraction (s, "fraction", &best_fps_n, &best_fps_d);
+ gst_structure_get_int (s, "width", &info.width);
+ gst_structure_get_int (s, "height", &info.height);
+ gst_structure_get_fraction (s, "fraction", &info.fps_n, &info.fps_d);
}
- mix->fps_n = best_fps_n;
- mix->fps_d = best_fps_d;
- mix->width = best_width;
- mix->height = best_height;
+ caps = gst_video_info_to_caps (&info);
GST_VIDEO_MIXER2_UNLOCK (mix);
- ret = gst_pad_set_caps (mix->srcpad, caps);
+ ret = gst_videomixer2_src_setcaps (mix->srcpad, mix, caps);
gst_caps_unref (caps);
} else {
GST_VIDEO_MIXER2_UNLOCK (mix);
static gboolean
-gst_videomixer2_pad_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_videomixer2_pad_sink_setcaps (GstPad * pad, GstObject * parent,
+ GstCaps * caps)
{
GstVideoMixer2 *mix;
GstVideoMixer2Pad *mixpad;
- GstVideoFormat fmt;
- gint width, height;
- gint fps_n = 0, fps_d = 0;
- gint par_n = 1, par_d = 1;
+ GstVideoInfo info;
gboolean ret = FALSE;
- GstStructure *s;
GST_INFO_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, caps);
- mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
+ mix = GST_VIDEO_MIXER2 (parent);
mixpad = GST_VIDEO_MIXER2_PAD (pad);
- if (!gst_video_format_parse_caps (caps, &fmt, &width, &height) ||
- !gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d)) {
- GST_ERROR_OBJECT (pad, "Failed to parse caps");
- goto beach;
- }
-
- s = gst_caps_get_structure (caps, 0);
- if (gst_structure_has_field (s, "framerate")
- && !gst_video_parse_caps_framerate (caps, &fps_n, &fps_d)) {
+ if (!gst_video_info_from_caps (&info, caps)) {
GST_ERROR_OBJECT (pad, "Failed to parse caps");
goto beach;
}
GST_VIDEO_MIXER2_LOCK (mix);
- if (mix->format != GST_VIDEO_FORMAT_UNKNOWN) {
- if (mix->format != fmt || mix->par_n != par_n || mix->par_d != par_d) {
+ if (GST_VIDEO_INFO_FORMAT (&mix->info) != GST_VIDEO_FORMAT_UNKNOWN) {
+ if (GST_VIDEO_INFO_FORMAT (&mix->info) != GST_VIDEO_INFO_FORMAT (&info) ||
+ GST_VIDEO_INFO_PAR_N (&mix->info) != GST_VIDEO_INFO_PAR_N (&info) ||
+ GST_VIDEO_INFO_PAR_D (&mix->info) != GST_VIDEO_INFO_PAR_D (&info)) {
GST_ERROR_OBJECT (pad, "Caps not compatible with other pads' caps");
GST_VIDEO_MIXER2_UNLOCK (mix);
goto beach;
}
}
- mix->format = fmt;
- mix->par_n = par_n;
- mix->par_d = par_d;
- mixpad->fps_n = fps_n;
- mixpad->fps_d = fps_d;
- mixpad->width = width;
- mixpad->height = height;
+ mix->info = info;
+ mixpad->info = info;
GST_VIDEO_MIXER2_UNLOCK (mix);
ret = gst_videomixer2_update_src_caps (mix);
beach:
- gst_object_unref (mix);
-
return ret;
}
static GstCaps *
-gst_videomixer2_pad_sink_getcaps (GstPad * pad)
+gst_videomixer2_pad_sink_getcaps (GstPad * pad, GstObject * parent,
+ GstCaps * filter)
{
GstVideoMixer2 *mix;
GstCaps *srccaps;
GstStructure *s;
gint i, n;
- mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
+ mix = GST_VIDEO_MIXER2 (parent);
+
+ srccaps = gst_pad_get_current_caps (GST_PAD (mix->srcpad));
+ if (srccaps == NULL)
+ srccaps = gst_pad_get_pad_template_caps (GST_PAD (mix->srcpad));
- srccaps = gst_pad_get_fixed_caps_func (GST_PAD (mix->srcpad));
srccaps = gst_caps_make_writable (srccaps);
n = gst_caps_get_size (srccaps);
}
static gboolean
-gst_videomixer2_pad_sink_acceptcaps (GstPad * pad, GstCaps * caps)
+gst_videomixer2_pad_sink_acceptcaps (GstPad * pad, GstObject * parent,
+ GstCaps * caps)
{
gboolean ret;
GstVideoMixer2 *mix;
gint i, n;
GstStructure *s;
- mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
+ mix = GST_VIDEO_MIXER2 (parent);
GST_DEBUG_OBJECT (pad, "%" GST_PTR_FORMAT, caps);
- accepted_caps = gst_pad_get_fixed_caps_func (GST_PAD (mix->srcpad));
+ accepted_caps = gst_pad_get_current_caps (GST_PAD (mix->srcpad));
+ if (accepted_caps == NULL)
+ accepted_caps = gst_pad_get_pad_template_caps (GST_PAD (mix->srcpad));
+
accepted_caps = gst_caps_make_writable (accepted_caps);
GST_LOG_OBJECT (pad, "src caps %" GST_PTR_FORMAT, accepted_caps);
GST_INFO_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT, (ret ? "" : "not "),
caps);
GST_INFO_OBJECT (pad, "acceptable caps are %" GST_PTR_FORMAT, accepted_caps);
-
gst_caps_unref (accepted_caps);
- gst_object_unref (mix);
+ return ret;
+}
+
+static gboolean
+gst_videomixer2_pad_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ gboolean ret = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_videomixer2_pad_sink_getcaps (pad, parent, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ break;
+ }
+ case GST_QUERY_ACCEPT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_query_parse_accept_caps (query, &caps);
+ ret = gst_videomixer2_pad_sink_acceptcaps (pad, parent, caps);
+ gst_query_set_accept_caps_result (query, ret);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_query_default (pad, parent, query);
+ break;
+ }
return ret;
}
gst_videomixer2_pad_init (GstVideoMixer2Pad * mixerpad)
{
/* setup some pad functions */
- gst_pad_set_setcaps_function (GST_PAD (mixerpad),
- gst_videomixer2_pad_sink_setcaps);
- gst_pad_set_acceptcaps_function (GST_PAD (mixerpad),
- GST_DEBUG_FUNCPTR (gst_videomixer2_pad_sink_acceptcaps));
- gst_pad_set_getcaps_function (GST_PAD (mixerpad),
- gst_videomixer2_pad_sink_getcaps);
+ gst_pad_set_query_function (GST_PAD (mixerpad),
+ gst_videomixer2_pad_sink_query);
mixerpad->zorder = DEFAULT_PAD_ZORDER;
mixerpad->xpos = DEFAULT_PAD_XPOS;
return video_mixer_background_type;
}
-
-GST_BOILERPLATE_FULL (GstVideoMixer2, gst_videomixer2, GstElement,
- GST_TYPE_ELEMENT, _do_init);
+#define gst_videomixer2_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstVideoMixer2, gst_videomixer2, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_CHILD_PROXY,
+ gst_videomixer2_child_proxy_init));
static void
gst_videomixer2_update_qos (GstVideoMixer2 * mix, gdouble proportion,
if (G_UNLIKELY (diff > 0))
mix->earliest_time =
timestamp + 2 * diff + gst_util_uint64_scale_int (GST_SECOND,
- mix->fps_d, mix->fps_n);
+ GST_VIDEO_INFO_FPS_D (&mix->info), GST_VIDEO_INFO_FPS_N (&mix->info));
else
mix->earliest_time = timestamp + diff;
} else {
{
GSList *l;
- mix->format = GST_VIDEO_FORMAT_UNKNOWN;
- mix->width = mix->height = 0;
- mix->fps_n = mix->fps_d = 0;
- mix->par_n = mix->par_d = 0;
+ gst_video_info_init (&mix->info);
mix->ts_offset = 0;
mix->nframes = 0;
gst_segment_init (&mix->segment, GST_FORMAT_TIME);
- mix->segment.last_stop = -1;
+ mix->segment.position = -1;
gst_videomixer2_reset_qos (mix);
mixcol->start_time = -1;
mixcol->end_time = -1;
- p->fps_n = p->fps_d = 0;
- p->width = p->height = 0;
+ gst_video_info_init (&p->info);
}
mix->newseg_pending = TRUE;
g_assert (start_time != -1 && end_time != -1);
/* Convert to the output segment rate */
- if (mix->segment.abs_rate != 1.0) {
- start_time *= mix->segment.abs_rate;
- end_time *= mix->segment.abs_rate;
+ if (ABS (mix->segment.rate) != 1.0) {
+ start_time *= ABS (mix->segment.rate);
+ end_time *= ABS (mix->segment.rate);
}
if (end_time >= output_start_time && start_time < output_end_time) {
GstBuffer ** outbuf)
{
GSList *l;
- GstFlowReturn ret;
guint outsize;
BlendFunction composite;
+ GstVideoFrame outframe;
- outsize = gst_video_format_get_size (mix->format, mix->width, mix->height);
- ret = gst_pad_alloc_buffer_and_set_caps (mix->srcpad, GST_BUFFER_OFFSET_NONE,
- outsize, GST_PAD_CAPS (mix->srcpad), outbuf);
- if (ret != GST_FLOW_OK)
- return ret;
+ outsize = GST_VIDEO_INFO_SIZE (&mix->info);
+ *outbuf = gst_buffer_new_allocate (NULL, outsize, 15);
GST_BUFFER_TIMESTAMP (*outbuf) = output_start_time;
GST_BUFFER_DURATION (*outbuf) = output_end_time - output_start_time;
+ gst_video_frame_map (&outframe, &mix->info, *outbuf, GST_MAP_READWRITE);
+
/* default to blending */
composite = mix->blend;
switch (mix->background) {
case VIDEO_MIXER2_BACKGROUND_CHECKER:
- mix->fill_checker (GST_BUFFER_DATA (*outbuf), mix->width, mix->height);
+ mix->fill_checker (&outframe);
break;
case VIDEO_MIXER2_BACKGROUND_BLACK:
- mix->fill_color (GST_BUFFER_DATA (*outbuf), mix->width,
- mix->height, 16, 128, 128);
+ mix->fill_color (&outframe, 16, 128, 128);
break;
case VIDEO_MIXER2_BACKGROUND_WHITE:
- mix->fill_color (GST_BUFFER_DATA (*outbuf), mix->width,
- mix->height, 240, 128, 128);
+ mix->fill_color (&outframe, 240, 128, 128);
break;
case VIDEO_MIXER2_BACKGROUND_TRANSPARENT:
- orc_memset (GST_BUFFER_DATA (*outbuf), 0,
- gst_video_format_get_row_stride (mix->format, 0,
- mix->width) * mix->height);
+ gst_buffer_memset (*outbuf, 0, 0, outsize);
/* use overlay to keep background transparent */
composite = mix->overlay;
break;
GstClockTime timestamp;
gint64 stream_time;
GstSegment *seg;
+ GstVideoFrame frame;
seg = &mixcol->collect.segment;
/* sync object properties on stream time */
if (GST_CLOCK_TIME_IS_VALID (stream_time))
- gst_object_sync_values (G_OBJECT (pad), stream_time);
+ gst_object_sync_values (GST_OBJECT (pad), stream_time);
+
+ gst_video_frame_map (&frame, &pad->info, mixcol->buffer, GST_MAP_READ);
- composite (GST_BUFFER_DATA (mixcol->buffer),
- pad->xpos, pad->ypos, pad->width, pad->height, pad->alpha,
- GST_BUFFER_DATA (*outbuf), mix->width, mix->height);
+ composite (&frame, pad->xpos, pad->ypos, pad->alpha, &outframe);
+
+ gst_video_frame_unmap (&frame);
}
}
+ gst_video_frame_unmap (&outframe);
return GST_FLOW_OK;
}
gint64 jitter;
/* If we're not negotiated yet... */
- if (mix->format == GST_VIDEO_FORMAT_UNKNOWN)
+ if (GST_VIDEO_INFO_FORMAT (&mix->info) == GST_VIDEO_FORMAT_UNKNOWN)
return GST_FLOW_NOT_NEGOTIATED;
if (g_atomic_int_compare_and_exchange (&mix->flush_stop_pending, TRUE, FALSE)) {
GST_DEBUG_OBJECT (mix, "pending flush stop");
- gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop ());
+ gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop (TRUE));
}
GST_VIDEO_MIXER2_LOCK (mix);
if (mix->newseg_pending) {
GST_DEBUG_OBJECT (mix, "Sending NEWSEGMENT event");
- if (!gst_pad_push_event (mix->srcpad, gst_event_new_new_segment_full (FALSE,
- mix->segment.rate, mix->segment.applied_rate,
- mix->segment.format, mix->segment.start, mix->segment.stop,
- mix->segment.time))) {
+ if (!gst_pad_push_event (mix->srcpad,
+ gst_event_new_segment (&mix->segment))) {
ret = GST_FLOW_ERROR;
goto done;
}
mix->newseg_pending = FALSE;
}
- if (mix->segment.last_stop == -1)
+ if (mix->segment.position == -1)
output_start_time = mix->segment.start;
else
- output_start_time = mix->segment.last_stop;
+ output_start_time = mix->segment.position;
if (output_start_time >= mix->segment.stop) {
GST_DEBUG_OBJECT (mix, "Segment done");
gst_pad_push_event (mix->srcpad, gst_event_new_eos ());
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto done;
}
output_end_time =
mix->ts_offset + gst_util_uint64_scale (mix->nframes + 1,
- GST_SECOND * mix->fps_d, mix->fps_n);
+ GST_SECOND * GST_VIDEO_INFO_FPS_D (&mix->info),
+ GST_VIDEO_INFO_FPS_N (&mix->info));
if (mix->segment.stop != -1)
output_end_time = MIN (output_end_time, mix->segment.stop);
} else if (res == -1) {
GST_DEBUG_OBJECT (mix, "All sinkpads are EOS -- forwarding");
gst_pad_push_event (mix->srcpad, gst_event_new_eos ());
- ret = GST_FLOW_UNEXPECTED;
+ ret = GST_FLOW_EOS;
goto done;
} else if (res == -2) {
GST_ERROR_OBJECT (mix, "Error collecting buffers");
ret = GST_FLOW_OK;
}
- gst_segment_set_last_stop (&mix->segment, GST_FORMAT_TIME, output_end_time);
+ mix->segment.position = output_end_time;
mix->nframes++;
GST_VIDEO_MIXER2_UNLOCK (mix);
return ret;
}
-static GstCaps *
-gst_videomixer2_src_getcaps (GstPad * pad)
+static gboolean
+gst_videomixer2_query_caps (GstPad * pad, GstObject * parent, GstQuery * query)
{
- GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
- GstCaps *caps;
+ GstCaps *filter, *caps;
+ GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (parent);
GstStructure *s;
gint n;
- if (mix->format != GST_VIDEO_FORMAT_UNKNOWN) {
- caps = gst_caps_copy (GST_PAD_CAPS (mix->srcpad));
+ gst_query_parse_caps (query, &filter);
+
+ if (GST_VIDEO_INFO_FORMAT (&mix->info) != GST_VIDEO_FORMAT_UNKNOWN) {
+ caps = gst_pad_get_current_caps (mix->srcpad);
} else {
- caps = gst_caps_copy (gst_pad_get_pad_template_caps (mix->srcpad));
+ caps = gst_pad_get_pad_template_caps (mix->srcpad);
}
+ caps = gst_caps_make_writable (caps);
+
n = gst_caps_get_size (caps) - 1;
for (; n >= 0; n--) {
s = gst_caps_get_structure (caps, n);
gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
"height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
- if (mix->fps_d != 0) {
+ if (GST_VIDEO_INFO_FPS_D (&mix->info) != 0) {
gst_structure_set (s,
"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
}
}
+ gst_query_set_caps_result (query, caps);
- gst_object_unref (mix);
-
- return caps;
+ return TRUE;
}
static gboolean
gst_videomixer2_query_duration (GstVideoMixer2 * mix, GstQuery * query)
{
+ GValue item = { 0 };
gint64 max;
gboolean res;
GstFormat format;
/* Take maximum of all durations */
it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
while (!done) {
- GstIteratorResult ires;
- gpointer item;
-
- ires = gst_iterator_next (it, &item);
- switch (ires) {
+ switch (gst_iterator_next (it, &item)) {
case GST_ITERATOR_DONE:
done = TRUE;
break;
case GST_ITERATOR_OK:
{
- GstPad *pad = GST_PAD_CAST (item);
+ GstPad *pad;
gint64 duration;
+ pad = g_value_get_object (&item);
+
/* ask sink peer for duration */
- res &= gst_pad_query_peer_duration (pad, &format, &duration);
+ res &= gst_pad_peer_query_duration (pad, format, &duration);
/* take max from all valid return values */
if (res) {
/* valid unknown length, stop searching */
else if (duration > max)
max = duration;
}
- gst_object_unref (pad);
+ g_value_reset (&item);
break;
}
case GST_ITERATOR_RESYNC:
break;
}
}
+ g_value_unset (&item);
gst_iterator_free (it);
if (res) {
gboolean res;
GstIterator *it;
gboolean done;
+ GValue item = { 0 };
res = TRUE;
done = FALSE;
/* Take maximum of all latency values */
it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
while (!done) {
- GstIteratorResult ires;
- gpointer item;
-
- ires = gst_iterator_next (it, &item);
- switch (ires) {
+ switch (gst_iterator_next (it, &item)) {
case GST_ITERATOR_DONE:
done = TRUE;
break;
case GST_ITERATOR_OK:
{
- GstPad *pad = GST_PAD_CAST (item);
+ GstPad *pad = g_value_get_object (&item);
GstQuery *peerquery;
GstClockTime min_cur, max_cur;
gboolean live_cur;
}
gst_query_unref (peerquery);
- gst_object_unref (pad);
+ g_value_reset (&item);
break;
}
case GST_ITERATOR_RESYNC:
break;
}
}
+ g_value_unset (&item);
gst_iterator_free (it);
if (res) {
}
static gboolean
-gst_videomixer2_src_query (GstPad * pad, GstQuery * query)
+gst_videomixer2_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
- GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
+ GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (parent);
gboolean res = FALSE;
switch (GST_QUERY_TYPE (query)) {
case GST_FORMAT_TIME:
gst_query_set_position (query, format,
gst_segment_to_stream_time (&mix->segment, GST_FORMAT_TIME,
- mix->segment.last_stop));
+ mix->segment.position));
res = TRUE;
break;
default:
case GST_QUERY_LATENCY:
res = gst_videomixer2_query_latency (mix, query);
break;
+ case GST_QUERY_CAPS:
+ res = gst_videomixer2_query_caps (pad, parent, query);
+ break;
default:
/* FIXME, needs a custom query handler because we have multiple
* sinkpads */
gst_query_unref (query);
break;
}
-
- gst_object_unref (mix);
return res;
}
static gboolean
-gst_videomixer2_src_event (GstPad * pad, GstEvent * event)
+gst_videomixer2_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
- GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
+ GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (parent);
gboolean result;
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_QOS:{
+ case GST_EVENT_QOS:
+ {
+ GstQOSType type;
GstClockTimeDiff diff;
GstClockTime timestamp;
gdouble proportion;
- gst_event_parse_qos (event, &proportion, &diff, ×tamp);
+ gst_event_parse_qos (event, &type, &proportion, &diff, ×tamp);
gst_videomixer2_update_qos (mix, proportion, diff, timestamp);
}
/* Convert to the output segment rate */
- if (mix->segment.abs_rate != abs_rate) {
- if (mix->segment.abs_rate != 1.0 && p->mixcol->buffer) {
- p->mixcol->start_time /= mix->segment.abs_rate;
- p->mixcol->end_time /= mix->segment.abs_rate;
+ if (ABS (mix->segment.rate) != abs_rate) {
+ if (ABS (mix->segment.rate) != 1.0 && p->mixcol->buffer) {
+ p->mixcol->start_time /= ABS (mix->segment.rate);
+ p->mixcol->end_time /= ABS (mix->segment.rate);
}
if (abs_rate != 1.0 && p->mixcol->buffer) {
p->mixcol->start_time *= abs_rate;
}
GST_VIDEO_MIXER2_UNLOCK (mix);
- gst_segment_set_seek (&mix->segment, rate, fmt, flags, start_type, start,
+ gst_segment_do_seek (&mix->segment, rate, fmt, flags, start_type, start,
stop_type, stop, NULL);
- mix->segment.last_stop = -1;
+ mix->segment.position = -1;
mix->ts_offset = 0;
mix->nframes = 0;
mix->newseg_pending = TRUE;
if (g_atomic_int_compare_and_exchange (&mix->flush_stop_pending, TRUE,
FALSE)) {
GST_DEBUG_OBJECT (mix, "pending flush stop");
- gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop ());
+ gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop (TRUE));
}
break;
result = gst_videomixer2_push_sink_event (mix, event);
break;
}
- gst_object_unref (mix);
return result;
}
static gboolean
-gst_videomixer2_src_setcaps (GstPad * pad, GstCaps * caps)
+gst_videomixer2_src_setcaps (GstPad * pad, GstVideoMixer2 * mix, GstCaps * caps)
{
- GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (gst_pad_get_parent_element (pad));
gboolean ret = FALSE;
- GstVideoFormat fmt;
- gint width, height;
- gint fps_n, fps_d;
- gint par_n, par_d;
+ GstVideoInfo info;
GST_INFO_OBJECT (pad, "set src caps: %" GST_PTR_FORMAT, caps);
mix->fill_checker = NULL;
mix->fill_color = NULL;
- if (!gst_video_format_parse_caps (caps, &fmt, &width, &height) ||
- !gst_video_parse_caps_framerate (caps, &fps_n, &fps_d) ||
- !gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d))
+ if (!gst_video_info_from_caps (&info, caps))
goto done;
GST_VIDEO_MIXER2_LOCK (mix);
- if (mix->fps_n != fps_n || mix->fps_d != fps_d) {
- if (mix->segment.last_stop != -1) {
- mix->ts_offset = mix->segment.last_stop - mix->segment.start;
+ if (GST_VIDEO_INFO_FPS_N (&mix->info) != GST_VIDEO_INFO_FPS_N (&info) ||
+ GST_VIDEO_INFO_FPS_D (&mix->info) != GST_VIDEO_INFO_FPS_D (&info)) {
+ if (mix->segment.position != -1) {
+ mix->ts_offset = mix->segment.position - mix->segment.start;
mix->nframes = 0;
}
gst_videomixer2_reset_qos (mix);
}
- mix->format = fmt;
- mix->width = width;
- mix->height = height;
- mix->fps_n = fps_n;
- mix->fps_d = fps_d;
- mix->par_n = par_n;
- mix->par_d = par_d;
+ mix->info = info;
- switch (mix->format) {
+ switch (GST_VIDEO_INFO_FORMAT (&mix->info)) {
case GST_VIDEO_FORMAT_AYUV:
mix->blend = gst_video_mixer_blend_ayuv;
mix->overlay = gst_video_mixer_overlay_ayuv;
}
GST_VIDEO_MIXER2_UNLOCK (mix);
+ ret = gst_pad_set_caps (pad, caps);
done:
- gst_object_unref (mix);
return ret;
}
end_time = GST_BUFFER_DURATION (buf);
if (end_time == -1)
- end_time = gst_util_uint64_scale_int (GST_SECOND, pad->fps_d, pad->fps_n);
+ end_time =
+ gst_util_uint64_scale_int (GST_SECOND,
+ GST_VIDEO_INFO_FPS_D (&pad->info), GST_VIDEO_INFO_FPS_N (&pad->info));
if (end_time == -1) {
*outbuf = buf;
return GST_FLOW_OK;
GST_FORMAT_TIME, end_time);
/* Convert to the output segment rate */
- if (mix->segment.abs_rate != 1.0) {
- start_time *= mix->segment.abs_rate;
- end_time *= mix->segment.abs_rate;
+ if (ABS (mix->segment.rate) != 1.0) {
+ start_time *= ABS (mix->segment.rate);
+ end_time *= ABS (mix->segment.rate);
}
if (mixcol->buffer != NULL && end_time < mixcol->end_time) {
/* return FALSE => event will be forwarded */
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:{
- GstFormat fmt;
- gst_event_parse_new_segment (event, NULL, NULL, &fmt, NULL, NULL, NULL);
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ ret =
+ gst_videomixer2_pad_sink_setcaps (GST_PAD (pad), GST_OBJECT (mix),
+ caps);
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_SEGMENT:{
+ GstSegment seg;
+ gst_event_copy_segment (event, &seg);
- g_assert (fmt == GST_FORMAT_TIME);
- /* eat NEWSEGMENT events, collectpads2 unrefs the event */
- ret = FALSE;
+ g_assert (seg.format == GST_FORMAT_TIME);
+ /* eat SEGMENT events */
+ ret = TRUE;
+ gst_event_unref (event);
break;
}
case GST_EVENT_FLUSH_STOP:
pad->mixcol->end_time = -1;
gst_segment_init (&mix->segment, GST_FORMAT_TIME);
- mix->segment.last_stop = -1;
+ mix->segment.position = -1;
mix->ts_offset = 0;
mix->nframes = 0;
- gst_pad_push_event (mix->srcpad, event);
+ ret = gst_pad_event_default (cdata->pad, GST_OBJECT (mix), event);
+ break;
+ case GST_EVENT_EOS:
+ gst_event_unref (event);
+ ret = TRUE;
break;
default:
- gst_pad_push_event (mix->srcpad, event);
+ ret = gst_pad_event_default (cdata->pad, GST_OBJECT (mix), event);
break;
}
}
static gboolean
-forward_event_func (GstPad * pad, GValue * ret, GstEvent * event)
+forward_event_func (GValue * item, GValue * ret, GstEvent * event)
{
+ GstPad *pad = g_value_get_object (item);
gst_event_ref (event);
GST_LOG_OBJECT (pad, "About to send event %s", GST_EVENT_TYPE_NAME (event));
if (!gst_pad_push_event (pad, event)) {
GST_LOG_OBJECT (pad, "Sent event %p (%s).",
event, GST_EVENT_TYPE_NAME (event));
}
- gst_object_unref (pad);
return TRUE;
}
static GstPad *
gst_videomixer2_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * req_name)
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
GstVideoMixer2 *mix;
GstVideoMixer2Pad *mixpad;
gst_child_proxy_child_removed (GST_OBJECT (mix), GST_OBJECT (mixpad));
mix->numpads--;
- update_caps = mix->format != GST_VIDEO_FORMAT_UNKNOWN;
+ update_caps = GST_VIDEO_INFO_FORMAT (&mix->info) != GST_VIDEO_FORMAT_UNKNOWN;
GST_VIDEO_MIXER2_UNLOCK (mix);
gst_collect_pads2_remove_pad (mix->collect, pad);
GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (o);
gst_object_unref (mix->collect);
- g_mutex_free (mix->lock);
+ g_mutex_clear (&mix->lock);
G_OBJECT_CLASS (parent_class)->finalize (o);
}
/* GObject boilerplate */
static void
-gst_videomixer2_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
-
- gst_element_class_set_details_simple (element_class, "Video mixer 2",
- "Filter/Editor/Video",
- "Mix multiple video streams", "Wim Taymans <wim@fluendo.com>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
-static void
gst_videomixer2_class_init (GstVideoMixer2Class * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_videomixer2_change_state);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "Video mixer 2",
+ "Filter/Editor/Video",
+ "Mix multiple video streams", "Wim Taymans <wim@fluendo.com>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
/* Register the pad class */
g_type_class_ref (GST_TYPE_VIDEO_MIXER2_PAD);
}
static void
-gst_videomixer2_init (GstVideoMixer2 * mix, GstVideoMixer2Class * g_class)
+gst_videomixer2_init (GstVideoMixer2 * mix)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (mix);
mix->srcpad =
gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
"src"), "src");
- gst_pad_set_getcaps_function (GST_PAD (mix->srcpad),
- GST_DEBUG_FUNCPTR (gst_videomixer2_src_getcaps));
- gst_pad_set_setcaps_function (GST_PAD (mix->srcpad),
- GST_DEBUG_FUNCPTR (gst_videomixer2_src_setcaps));
gst_pad_set_query_function (GST_PAD (mix->srcpad),
GST_DEBUG_FUNCPTR (gst_videomixer2_src_query));
gst_pad_set_event_function (GST_PAD (mix->srcpad),
gst_collect_pads2_set_clip_function (mix->collect,
(GstCollectPads2ClipFunction) gst_videomixer2_sink_clip, mix);
- mix->lock = g_mutex_new ();
+ g_mutex_init (&mix->lock);
/* initialize variables */
gst_videomixer2_reset (mix);
}
/* Element registration */
-gboolean
-gst_videomixer2_register (GstPlugin * plugin)
+static gboolean
+plugin_init (GstPlugin * plugin)
{
- GST_DEBUG_CATEGORY_INIT (gst_videomixer2_debug, "videomixer2", 0,
- "video mixer 2");
+ GST_DEBUG_CATEGORY_INIT (gst_videomixer2_debug, "videomixer", 0,
+ "video mixer");
+
+ gst_video_mixer_init_blend ();
- return gst_element_register (plugin, "videomixer2", GST_RANK_SECONDARY,
+ return gst_element_register (plugin, "videomixer", GST_RANK_PRIMARY,
GST_TYPE_VIDEO_MIXER2);
}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "videomixer",
+ "Video mixer", plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME,
+ GST_PACKAGE_ORIGIN)
GstPad *srcpad;
/* Lock to prevent the state to change while blending */
- GMutex *lock;
+ GMutex lock;
/* Sink pads using Collect Pads 2*/
GstCollectPads2 *collect;
gint next_sinkpad;
/* Output caps */
- GstVideoFormat format;
- gint width, height;
- gint fps_n;
- gint fps_d;
- gint par_n;
- gint par_d;
+ GstVideoInfo info;
gboolean newseg_pending;
gboolean flush_stop_pending;
};
GType gst_videomixer2_get_type (void);
-gboolean gst_videomixer2_register (GstPlugin * plugin);
G_END_DECLS
#endif /* __GST_VIDEO_MIXER2_H__ */
/* < private > */
/* caps */
- gint width, height;
- gint fps_n;
- gint fps_d;
+ GstVideoInfo info;
/* properties */
gint xpos, ypos;
+++ /dev/null
-/* Video mixer pad
- * Copyright (C) 2008 Wim Taymans <wim@fluendo.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_VIDEO_MIXER_PAD_H__
-#define __GST_VIDEO_MIXER_PAD_H__
-
-#include <gst/gst.h>
-#include <gst/base/gstcollectpads.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_VIDEO_MIXER_PAD (gst_videomixer_pad_get_type())
-#define GST_VIDEO_MIXER_PAD(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_MIXER_PAD, GstVideoMixerPad))
-#define GST_VIDEO_MIXER_PAD_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_MIXER_PAD, GstVideoMixerPadiClass))
-#define GST_IS_VIDEO_MIXER_PAD(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_MIXER_PAD))
-#define GST_IS_VIDEO_MIXER_PAD_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_MIXER_PAD))
-
-typedef struct _GstVideoMixerPad GstVideoMixerPad;
-typedef struct _GstVideoMixerPadClass GstVideoMixerPadClass;
-typedef struct _GstVideoMixerCollect GstVideoMixerCollect;
-
-struct _GstVideoMixerCollect
-{
- GstCollectData collect; /* we extend the CollectData */
-
- GstBuffer *buffer; /* the queued buffer for this pad */
-
- GstVideoMixerPad *mixpad;
-};
-
-/* all information needed for one video stream */
-struct _GstVideoMixerPad
-{
- GstPad parent; /* subclass the pad */
-
- gint64 queued;
-
- guint in_width, in_height;
- gint fps_n;
- gint fps_d;
- gint par_n;
- gint par_d;
-
- gint xpos, ypos;
- guint zorder;
- gint blend_mode;
- gdouble alpha;
-
- GstVideoMixerCollect *mixcol;
-};
-
-struct _GstVideoMixerPadClass
-{
- GstPadClass parent_class;
-};
-
-G_END_DECLS
-#endif /* __GST_VIDEO_MIXER_PAD_H__ */
$(GST_CFLAGS)
libgstwavenc_la_LIBADD = \
$(GST_PLUGINS_BASE_LIBS) \
+ -lgstaudio-@GST_MAJORMINOR@ \
-lgstriff-@GST_MAJORMINOR@ \
$(GST_LIBS)
libgstwavenc_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
#include <string.h>
#include "gstwavenc.h"
+#include <gst/audio/audio.h>
#include <gst/riff/riff-media.h>
GST_DEBUG_CATEGORY_STATIC (wavenc_debug);
/* Max. of two channels, more channels need WAVFORMATEX with
* channel layout, which we do not support yet */
#define SINK_CAPS \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
"rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 2 ], " \
- "endianness = (int) LITTLE_ENDIAN, " \
- "width = (int) 32, " \
- "depth = (int) 32, " \
- "signed = (boolean) true" \
- "; " \
- "audio/x-raw-int, " \
- "rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 2 ], " \
- "endianness = (int) LITTLE_ENDIAN, " \
- "width = (int) 24, " \
- "depth = (int) 24, " \
- "signed = (boolean) true" \
+ "channels = (int) 1, " \
+ "format = (string) { S32LE, S24LE, S16LE, U8, F32LE, F64LE }, " \
+ "layout = (string) interleaved" \
"; " \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
"rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 2 ], " \
- "endianness = (int) LITTLE_ENDIAN, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "signed = (boolean) true" \
+ "channels = (int) 2, " \
+ "channel-mask = (bitmask) 0x3, " \
+ "format = (string) { S32LE, S24LE, S16LE, U8, F32LE, F64LE }, " \
+ "layout = (string) interleaved" \
"; " \
- "audio/x-raw-int, " \
- "rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 2 ], " \
- "width = (int) 8, " \
- "depth = (int) 8, " \
- "signed = (boolean) false" \
- "; " \
- "audio/x-raw-float, " \
- "rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 2 ], " \
- "endianness = (int) LITTLE_ENDIAN, " \
- "width = (int) { 32, 64 }; " \
"audio/x-alaw, " \
"rate = (int) [ 8000, 192000 ], " \
"channels = (int) [ 1, 2 ], " \
GST_STATIC_CAPS ("audio/x-wav")
);
-GST_BOILERPLATE (GstWavEnc, gst_wavenc, GstElement, GST_TYPE_ELEMENT);
+#define gst_wavenc_parent_class parent_class
+G_DEFINE_TYPE (GstWavEnc, gst_wavenc, GST_TYPE_ELEMENT);
-static GstFlowReturn gst_wavenc_chain (GstPad * pad, GstBuffer * buf);
-static gboolean gst_wavenc_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_wavenc_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_wavenc_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static GstStateChangeReturn gst_wavenc_change_state (GstElement * element,
GstStateChange transition);
static gboolean gst_wavenc_sink_setcaps (GstPad * pad, GstCaps * caps);
static void
-gst_wavenc_base_init (gpointer g_class)
+gst_wavenc_class_init (GstWavEncClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstElementClass *element_class;
+
+ element_class = (GstElementClass *) klass;
+
+ element_class->change_state = GST_DEBUG_FUNCPTR (gst_wavenc_change_state);
gst_element_class_set_details_simple (element_class, "WAV audio muxer",
"Codec/Muxer/Audio",
"Encode raw audio into WAV", "Iain Holmes <iain@prettypeople.org>");
- gst_element_class_add_static_pad_template (element_class, &src_factory);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
GST_DEBUG_CATEGORY_INIT (wavenc_debug, "wavenc", 0, "WAV encoder element");
}
static void
-gst_wavenc_class_init (GstWavEncClass * klass)
-{
- GstElementClass *element_class;
-
- element_class = (GstElementClass *) klass;
-
- element_class->change_state = GST_DEBUG_FUNCPTR (gst_wavenc_change_state);
-}
-
-static void
-gst_wavenc_init (GstWavEnc * wavenc, GstWavEncClass * klass)
+gst_wavenc_init (GstWavEnc * wavenc)
{
wavenc->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_chain_function (wavenc->sinkpad,
GST_DEBUG_FUNCPTR (gst_wavenc_chain));
gst_pad_set_event_function (wavenc->sinkpad,
GST_DEBUG_FUNCPTR (gst_wavenc_event));
- gst_pad_set_setcaps_function (wavenc->sinkpad,
- GST_DEBUG_FUNCPTR (gst_wavenc_sink_setcaps));
gst_pad_use_fixed_caps (wavenc->sinkpad);
gst_element_add_pad (GST_ELEMENT (wavenc), wavenc->sinkpad);
{
struct wave_header wave;
GstBuffer *buf;
+ GstMapInfo map;
guint8 *header;
buf = gst_buffer_new_and_alloc (WAV_HEADER_LEN);
- header = GST_BUFFER_DATA (buf);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ header = map.data;
memset (header, 0, WAV_HEADER_LEN);
wave.common.wChannels = wavenc->channels;
memcpy (header + 36, (char *) wave.data.id, 4);
GST_WRITE_UINT32_LE (header + 40, wave.data.len);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (wavenc->srcpad));
+ gst_buffer_unmap (buf, &map);
return buf;
}
{
GstFlowReturn ret;
GstBuffer *outbuf;
+ GstSegment segment;
/* seek to beginning of file */
- gst_pad_push_event (wavenc->srcpad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ gst_pad_push_event (wavenc->srcpad, gst_event_new_segment (&segment));
GST_DEBUG_OBJECT (wavenc, "writing header with datasize=%u", audio_data_size);
GstWavEnc *wavenc;
GstStructure *structure;
const gchar *name;
- gint chans, rate, width;
+ gint chans, rate;
+ GstCaps *ccaps;
wavenc = GST_WAVENC (gst_pad_get_parent (pad));
- if (wavenc->sent_header && !gst_caps_can_intersect (caps, GST_PAD_CAPS (pad))) {
+ ccaps = gst_pad_get_current_caps (pad);
+ if (wavenc->sent_header && ccaps && !gst_caps_can_intersect (caps, ccaps)) {
+ gst_caps_unref (ccaps);
GST_WARNING_OBJECT (wavenc, "cannot change format in middle of stream");
goto fail;
}
+ if (ccaps)
+ gst_caps_unref (ccaps);
GST_DEBUG_OBJECT (wavenc, "got caps: %" GST_PTR_FORMAT, caps);
goto fail;
}
- if (strcmp (name, "audio/x-raw-int") == 0) {
- if (!gst_structure_get_int (structure, "width", &width)) {
- GST_WARNING_OBJECT (wavenc, "caps incomplete");
+ if (strcmp (name, "audio/x-raw") == 0) {
+ GstAudioInfo info;
+
+ if (!gst_audio_info_from_caps (&info, caps))
goto fail;
- }
- wavenc->format = GST_RIFF_WAVE_FORMAT_PCM;
- wavenc->width = width;
- } else if (strcmp (name, "audio/x-raw-float") == 0) {
- if (!gst_structure_get_int (structure, "width", &width)) {
- GST_WARNING_OBJECT (wavenc, "caps incomplete");
+
+ if (GST_AUDIO_INFO_IS_INTEGER (&info))
+ wavenc->format = GST_RIFF_WAVE_FORMAT_PCM;
+ else if (GST_AUDIO_INFO_IS_FLOAT (&info))
+ wavenc->format = GST_RIFF_WAVE_FORMAT_IEEE_FLOAT;
+ else
goto fail;
- }
- wavenc->format = GST_RIFF_WAVE_FORMAT_IEEE_FLOAT;
- wavenc->width = width;
+
+ wavenc->width = GST_AUDIO_INFO_WIDTH (&info);
} else if (strcmp (name, "audio/x-alaw") == 0) {
wavenc->format = GST_RIFF_WAVE_FORMAT_ALAW;
wavenc->width = 8;
#endif
static gboolean
-gst_wavenc_event (GstPad * pad, GstEvent * event)
+gst_wavenc_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
gboolean res = TRUE;
GstWavEnc *wavenc;
- wavenc = GST_WAVENC (gst_pad_get_parent (pad));
+ wavenc = GST_WAVENC (parent);
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ gst_wavenc_sink_setcaps (pad, caps);
+
+ /* have our own src caps */
+ gst_event_unref (event);
+ break;
+ }
case GST_EVENT_EOS:{
GST_DEBUG_OBJECT (wavenc, "got EOS");
#if 0
wavenc->finished_properly = TRUE;
/* and forward the EOS event */
- res = gst_pad_event_default (pad, event);
+ res = gst_pad_event_default (pad, parent, event);
break;
}
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
/* Just drop it, it's probably in TIME format
* anyway. We'll send our own newsegment event */
gst_event_unref (event);
break;
default:
- res = gst_pad_event_default (pad, event);
+ res = gst_pad_event_default (pad, parent, event);
break;
}
- gst_object_unref (wavenc);
return res;
}
static GstFlowReturn
-gst_wavenc_chain (GstPad * pad, GstBuffer * buf)
+gst_wavenc_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
- GstWavEnc *wavenc = GST_WAVENC (GST_PAD_PARENT (pad));
+ GstWavEnc *wavenc = GST_WAVENC (parent);
GstFlowReturn flow = GST_FLOW_OK;
- g_return_val_if_fail (wavenc->channels > 0, GST_FLOW_WRONG_STATE);
+ g_return_val_if_fail (wavenc->channels > 0, GST_FLOW_FLUSHING);
if (!wavenc->sent_header) {
/* use bogus size initially, we'll write the real
wavenc->sent_header = TRUE;
}
- GST_LOG_OBJECT (wavenc, "pushing %u bytes raw audio, ts=%" GST_TIME_FORMAT,
- GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
+ GST_LOG_OBJECT (wavenc,
+ "pushing %" G_GSIZE_FORMAT " bytes raw audio, ts=%" GST_TIME_FORMAT,
+ gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
- buf = gst_buffer_make_metadata_writable (buf);
+ buf = gst_buffer_make_writable (buf);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (wavenc->srcpad));
GST_BUFFER_OFFSET (buf) = WAV_HEADER_LEN + wavenc->length;
GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
- wavenc->length += GST_BUFFER_SIZE (buf);
+ wavenc->length += gst_buffer_get_size (buf);
flow = gst_pad_push (wavenc->srcpad, buf);
break;
}
- ret = parent_class->change_state (element, transition);
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
if (ret != GST_STATE_CHANGE_SUCCESS)
return ret;
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <string.h>
#include <math.h>
static void gst_wavparse_dispose (GObject * object);
-static gboolean gst_wavparse_sink_activate (GstPad * sinkpad);
-static gboolean gst_wavparse_sink_activate_pull (GstPad * sinkpad,
- gboolean active);
+static gboolean gst_wavparse_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+static gboolean gst_wavparse_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
static gboolean gst_wavparse_send_event (GstElement * element,
GstEvent * event);
static GstStateChangeReturn gst_wavparse_change_state (GstElement * element,
GstStateChange transition);
-static const GstQueryType *gst_wavparse_get_query_types (GstPad * pad);
-static gboolean gst_wavparse_pad_query (GstPad * pad, GstQuery * query);
-static gboolean gst_wavparse_pad_convert (GstPad * pad,
- GstFormat src_format,
+static gboolean gst_wavparse_pad_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_wavparse_pad_convert (GstPad * pad, GstFormat src_format,
gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
-static GstFlowReturn gst_wavparse_chain (GstPad * pad, GstBuffer * buf);
-static gboolean gst_wavparse_sink_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_wavparse_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_wavparse_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static void gst_wavparse_loop (GstPad * pad);
-static gboolean gst_wavparse_srcpad_event (GstPad * pad, GstEvent * event);
+static gboolean gst_wavparse_srcpad_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
static void gst_wavparse_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
};
static GstStaticPadTemplate sink_template_factory =
-GST_STATIC_PAD_TEMPLATE ("wavparse_sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("audio/x-wav")
);
-#define DEBUG_INIT(bla) \
+#define DEBUG_INIT \
GST_DEBUG_CATEGORY_INIT (wavparse_debug, "wavparse", 0, "WAV parser");
-GST_BOILERPLATE_FULL (GstWavParse, gst_wavparse, GstElement,
- GST_TYPE_ELEMENT, DEBUG_INIT);
-
-static void
-gst_wavparse_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstPadTemplate *src_template;
-
- /* register pads */
- gst_element_class_add_static_pad_template (element_class,
- &sink_template_factory);
-
- src_template = gst_pad_template_new ("wavparse_src", GST_PAD_SRC,
- GST_PAD_SOMETIMES, gst_riff_create_audio_template_caps ());
- gst_element_class_add_pad_template (element_class, src_template);
- gst_object_unref (src_template);
-
- gst_element_class_set_details_simple (element_class, "WAV audio demuxer",
- "Codec/Demuxer/Audio",
- "Parse a .wav file into raw audio",
- "Erik Walthinsen <omega@cse.ogi.edu>");
-}
+#define gst_wavparse_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstWavParse, gst_wavparse, GST_TYPE_ELEMENT,
+ DEBUG_INIT);
static void
gst_wavparse_class_init (GstWavParseClass * klass)
{
GstElementClass *gstelement_class;
GObjectClass *object_class;
+ GstPadTemplate *src_template;
gstelement_class = (GstElementClass *) klass;
object_class = (GObjectClass *) klass;
gstelement_class->change_state = gst_wavparse_change_state;
gstelement_class->send_event = gst_wavparse_send_event;
+
+ /* register pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template_factory));
+
+ src_template = gst_pad_template_new ("src", GST_PAD_SRC,
+ GST_PAD_ALWAYS, gst_riff_create_audio_template_caps ());
+ gst_element_class_add_pad_template (gstelement_class, src_template);
+
+ gst_element_class_set_details_simple (gstelement_class, "WAV audio demuxer",
+ "Codec/Demuxer/Audio",
+ "Parse a .wav file into raw audio",
+ "Erik Walthinsen <omega@cse.ogi.edu>");
}
static void
if (wav->start_segment)
gst_event_unref (wav->start_segment);
wav->start_segment = NULL;
- if (wav->close_segment)
- gst_event_unref (wav->close_segment);
- wav->close_segment = NULL;
}
static void
}
static void
-gst_wavparse_init (GstWavParse * wavparse, GstWavParseClass * g_class)
+gst_wavparse_init (GstWavParse * wavparse)
{
gst_wavparse_reset (wavparse);
gst_pad_new_from_static_template (&sink_template_factory, "sink");
gst_pad_set_activate_function (wavparse->sinkpad,
GST_DEBUG_FUNCPTR (gst_wavparse_sink_activate));
- gst_pad_set_activatepull_function (wavparse->sinkpad,
- GST_DEBUG_FUNCPTR (gst_wavparse_sink_activate_pull));
+ gst_pad_set_activatemode_function (wavparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_sink_activate_mode));
gst_pad_set_chain_function (wavparse->sinkpad,
GST_DEBUG_FUNCPTR (gst_wavparse_chain));
gst_pad_set_event_function (wavparse->sinkpad,
GST_DEBUG_FUNCPTR (gst_wavparse_sink_event));
gst_element_add_pad (GST_ELEMENT_CAST (wavparse), wavparse->sinkpad);
- /* src, will be created later */
- wavparse->srcpad = NULL;
-}
-
-static void
-gst_wavparse_destroy_sourcepad (GstWavParse * wavparse)
-{
- if (wavparse->srcpad) {
- gst_element_remove_pad (GST_ELEMENT_CAST (wavparse), wavparse->srcpad);
- wavparse->srcpad = NULL;
- }
-}
-
-static void
-gst_wavparse_create_sourcepad (GstWavParse * wavparse)
-{
- GstElementClass *klass = GST_ELEMENT_GET_CLASS (wavparse);
- GstPadTemplate *src_template;
-
- /* destroy previous one */
- gst_wavparse_destroy_sourcepad (wavparse);
-
- /* source */
- src_template = gst_element_class_get_pad_template (klass, "wavparse_src");
- wavparse->srcpad = gst_pad_new_from_template (src_template, "src");
+ /* src */
+ wavparse->srcpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (GST_ELEMENT_GET_CLASS (wavparse), "src"), "src");
gst_pad_use_fixed_caps (wavparse->srcpad);
- gst_pad_set_query_type_function (wavparse->srcpad,
- GST_DEBUG_FUNCPTR (gst_wavparse_get_query_types));
gst_pad_set_query_function (wavparse->srcpad,
GST_DEBUG_FUNCPTR (gst_wavparse_pad_query));
gst_pad_set_event_function (wavparse->srcpad,
GST_DEBUG_FUNCPTR (gst_wavparse_srcpad_event));
-
- GST_DEBUG_OBJECT (wavparse, "srcpad created");
+ gst_element_add_pad (GST_ELEMENT_CAST (wavparse), wavparse->srcpad);
}
/* FIXME: why is that not in use? */
/* Note: gst_riff_create_audio_caps might need to fix values in
* the header header depending on the format, so call it first */
- caps = gst_riff_create_audio_caps (header->format, NULL, header, NULL);
+ /* FIXME: Need to handle the channel reorder map */
+ caps = gst_riff_create_audio_caps (header->format, NULL, header, NULL, NULL);
g_free (header);
if (caps == NULL)
if (cur_type != GST_SEEK_TYPE_NONE)
res =
gst_pad_query_convert (wav->srcpad, format, cur,
- &wav->segment.format, &cur);
+ wav->segment.format, &cur);
if (res && stop_type != GST_SEEK_TYPE_NONE)
res =
gst_pad_query_convert (wav->srcpad, format, stop,
- &wav->segment.format, &stop);
+ wav->segment.format, &stop);
if (!res)
goto no_format;
if (!event || wav->state != GST_WAVPARSE_DATA) {
if (wav->start_segment)
gst_event_unref (wav->start_segment);
- wav->start_segment =
+ // TODO
+/* wav->start_segment =
gst_event_new_new_segment (FALSE, wav->segment.rate,
wav->segment.format, wav->segment.last_stop, wav->segment.duration,
- wav->segment.last_stop);
+ wav->segment.last_stop);*/
res = TRUE;
} else {
/* convert seek positions to byte positions in data sections */
* as it completes one iteration (and thus might block when the sink is
* blocking in preroll). */
if (flush) {
- if (wav->srcpad) {
- GST_DEBUG_OBJECT (wav, "sending flush start");
- gst_pad_push_event (wav->srcpad, gst_event_new_flush_start ());
- }
+ GST_DEBUG_OBJECT (wav, "sending flush start");
+ gst_pad_push_event (wav->srcpad, gst_event_new_flush_start ());
} else {
gst_pad_pause_task (wav->sinkpad);
}
GST_PAD_STREAM_LOCK (wav->sinkpad);
/* save current position */
- last_stop = wav->segment.last_stop;
+ last_stop = wav->segment.position;
GST_DEBUG_OBJECT (wav, "stopped streaming at %" G_GINT64_FORMAT, last_stop);
* right values in the segment to perform the seek */
if (event) {
GST_DEBUG_OBJECT (wav, "configuring seek");
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
}
/* bring offset to bytes, if the bps is 0, we have the segment in BYTES and
* we can just copy the last_stop. If not, we use the bps to convert TIME to
* bytes. */
- if (!gst_wavparse_time_to_bytepos (wav, seeksegment.last_stop,
+ if (!gst_wavparse_time_to_bytepos (wav, seeksegment.position,
(gint64 *) & wav->offset))
- wav->offset = seeksegment.last_stop;
+ wav->offset = seeksegment.position;
GST_LOG_OBJECT (wav, "offset=%" G_GUINT64_FORMAT, wav->offset);
wav->offset -= (wav->offset % wav->bytes_per_sample);
GST_LOG_OBJECT (wav, "offset=%" G_GUINT64_FORMAT, wav->offset);
/* make sure filesize is not exceeded due to rounding errors or so,
* same precaution as in _stream_headers */
bformat = GST_FORMAT_BYTES;
- if (gst_pad_query_peer_duration (wav->sinkpad, &bformat, &upstream_size))
+ if (gst_pad_peer_query_duration (wav->sinkpad, bformat, &upstream_size))
wav->end_offset = MIN (wav->end_offset, upstream_size);
/* this is the range of bytes we will use for playback */
wav->end_offset, GST_TIME_ARGS (seeksegment.start), GST_TIME_ARGS (stop));
/* prepare for streaming again */
- if (wav->srcpad) {
- if (flush) {
- /* if we sent a FLUSH_START, we now send a FLUSH_STOP */
- GST_DEBUG_OBJECT (wav, "sending flush stop");
- gst_pad_push_event (wav->srcpad, gst_event_new_flush_stop ());
- } else if (wav->segment_running) {
- /* we are running the current segment and doing a non-flushing seek,
- * close the segment first based on the previous last_stop. */
- GST_DEBUG_OBJECT (wav, "closing running segment %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, wav->segment.start, wav->segment.last_stop);
-
- /* queue the segment for sending in the stream thread */
- if (wav->close_segment)
- gst_event_unref (wav->close_segment);
- wav->close_segment = gst_event_new_new_segment (TRUE,
- wav->segment.rate, wav->segment.format,
- wav->segment.start, wav->segment.last_stop, wav->segment.start);
- }
+ if (flush) {
+ /* if we sent a FLUSH_START, we now send a FLUSH_STOP */
+ GST_DEBUG_OBJECT (wav, "sending flush stop");
+ gst_pad_push_event (wav->srcpad, gst_event_new_flush_stop (TRUE));
}
/* now we did the seek and can activate the new segment values */
if (wav->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT_CAST (wav),
gst_message_new_segment_start (GST_OBJECT_CAST (wav),
- wav->segment.format, wav->segment.last_stop));
+ wav->segment.format, wav->segment.position));
}
/* now create the newsegment */
GST_DEBUG_OBJECT (wav, "Creating newsegment from %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, wav->segment.last_stop, stop);
+ " to %" G_GINT64_FORMAT, wav->segment.position, stop);
/* store the newsegment event so it can be sent from the streaming thread. */
if (wav->start_segment)
gst_event_unref (wav->start_segment);
- wav->start_segment =
- gst_event_new_new_segment (FALSE, wav->segment.rate,
- wav->segment.format, wav->segment.last_stop, stop,
- wav->segment.last_stop);
+ wav->start_segment = gst_event_new_segment (&wav->segment);
/* mark discont if we are going to stream from another position. */
- if (last_stop != wav->segment.last_stop) {
+ if (last_stop != wav->segment.position) {
GST_DEBUG_OBJECT (wav, "mark DISCONT, we did a seek to another position");
wav->discont = TRUE;
}
/* and start the streaming task again */
- wav->segment_running = TRUE;
if (!wav->streaming) {
gst_pad_start_task (wav->sinkpad, (GstTaskFunction) gst_wavparse_loop,
wav->sinkpad);
if (gst_adapter_available (wav->adapter) < 8)
return FALSE;
- data = gst_adapter_peek (wav->adapter, 8);
+ data = gst_adapter_map (wav->adapter, 8);
*tag = GST_READ_UINT32_LE (data);
*size = GST_READ_UINT32_LE (data + 4);
+ gst_adapter_unmap (wav->adapter);
GST_DEBUG ("Next chunk size is %u bytes, type %" GST_FOURCC_FORMAT, *size,
GST_FOURCC_ARGS (*tag));
GstCaps *caps = NULL;
gchar *codec_name = NULL;
GstEvent **event_p;
- GstFormat bformat;
gint64 upstream_size = 0;
/* search for "_fmt" chunk, which should be first */
/* Note: gst_riff_create_audio_caps might need to fix values in
* the header header depending on the format, so call it first */
+ /* FIXME: Need to handle the channel reorder map */
caps = gst_riff_create_audio_caps (header->format, NULL, header, extra,
- NULL, &codec_name);
+ NULL, &codec_name, NULL);
if (extra)
gst_buffer_unref (extra);
wav->got_fmt = TRUE;
if (codec_name) {
- wav->tags = gst_tag_list_new ();
+ wav->tags = gst_tag_list_new_empty ();
gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
GST_TAG_AUDIO_CODEC, codec_name, NULL);
}
- bformat = GST_FORMAT_BYTES;
- gst_pad_query_peer_duration (wav->sinkpad, &bformat, &upstream_size);
+ gst_pad_peer_query_duration (wav->sinkpad, GST_FORMAT_BYTES, &upstream_size);
GST_DEBUG_OBJECT (wav, "upstream size %" G_GUINT64_FORMAT, upstream_size);
/* loop headers until we get data */
if (!gst_wavparse_peek_chunk_info (wav, &tag, &size))
goto exit;
} else {
+ GstMapInfo map;
+
if ((res =
gst_pad_pull_range (wav->sinkpad, wav->offset, 8,
&buf)) != GST_FLOW_OK)
goto header_read_error;
- tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
- size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 4);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ tag = GST_READ_UINT32_LE (map.data);
+ size = GST_READ_UINT32_LE (map.data + 4);
+ gst_buffer_unmap (buf, &map);
}
GST_INFO_OBJECT (wav,
goto exit;
}
gst_adapter_flush (wav->adapter, 8);
- data = gst_adapter_peek (wav->adapter, data_size);
+ data = gst_adapter_map (wav->adapter, data_size);
wav->fact = GST_READ_UINT32_LE (data);
+ gst_adapter_unmap (wav->adapter);
gst_adapter_flush (wav->adapter, GST_ROUND_UP_2 (size));
} else {
gst_buffer_unref (buf);
gst_pad_pull_range (wav->sinkpad, wav->offset + 8,
data_size, &buf)) != GST_FLOW_OK)
goto header_read_error;
- wav->fact = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
+ gst_buffer_extract (buf, 0, &wav->fact, 4);
+ wav->fact = GUINT32_FROM_LE (wav->fact);
gst_buffer_unref (buf);
}
GST_DEBUG_OBJECT (wav, "have fact %u", wav->fact);
case GST_RIFF_TAG_acid:{
const gst_riff_acid *acid = NULL;
const guint data_size = sizeof (gst_riff_acid);
+ gfloat tempo;
GST_INFO_OBJECT (wav, "Have acid chunk");
if (size < data_size) {
goto exit;
}
gst_adapter_flush (wav->adapter, 8);
- acid = (const gst_riff_acid *) gst_adapter_peek (wav->adapter,
+ acid = (const gst_riff_acid *) gst_adapter_map (wav->adapter,
data_size);
+ tempo = acid->tempo;
+ gst_adapter_unmap (wav->adapter);
} else {
+ GstMapInfo map;
gst_buffer_unref (buf);
if ((res =
gst_pad_pull_range (wav->sinkpad, wav->offset + 8,
size, &buf)) != GST_FLOW_OK)
goto header_read_error;
- acid = (const gst_riff_acid *) GST_BUFFER_DATA (buf);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ acid = (const gst_riff_acid *) map.data;
+ tempo = acid->tempo;
+ gst_buffer_unmap (buf, &map);
}
/* send data as tags */
if (!wav->tags)
- wav->tags = gst_tag_list_new ();
+ wav->tags = gst_tag_list_new_empty ();
gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
- GST_TAG_BEATS_PER_MINUTE, acid->tempo, NULL);
+ GST_TAG_BEATS_PER_MINUTE, tempo, NULL);
size = GST_ROUND_UP_2 (size);
if (wav->streaming) {
if (gst_adapter_available (wav->adapter) < 12) {
goto exit;
}
- data = gst_adapter_peek (wav->adapter, 12);
+ data = gst_adapter_map (wav->adapter, 12);
ltag = GST_READ_UINT32_LE (data + 8);
+ gst_adapter_unmap (wav->adapter);
} else {
gst_buffer_unref (buf);
if ((res =
gst_pad_pull_range (wav->sinkpad, wav->offset, 12,
&buf)) != GST_FLOW_OK)
goto header_read_error;
- ltag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 8);
+ gst_buffer_extract (buf, 8, <ag, 4);
+ ltag = GUINT32_FROM_LE (ltag);
}
switch (ltag) {
case GST_RIFF_LIST_INFO:{
if (gst_wavparse_calculate_duration (wav)) {
gst_segment_init (&wav->segment, GST_FORMAT_TIME);
if (!wav->ignore_length)
- gst_segment_set_duration (&wav->segment, GST_FORMAT_TIME, wav->duration);
+ wav->segment.duration = wav->duration;
} else {
/* no bitrate, let downstream peer do the math, we'll feed it bytes. */
gst_segment_init (&wav->segment, GST_FORMAT_BYTES);
if (!wav->ignore_length)
- gst_segment_set_duration (&wav->segment, GST_FORMAT_BYTES, wav->datasize);
+ wav->segment.duration = wav->datasize;
}
/* now we have all the info to perform a pending seek if any, if no
if (wav->caps) {
s = gst_caps_get_structure (wav->caps, 0);
- if (s && gst_structure_has_name (s, "audio/x-raw-int") && buf != NULL) {
+ if (s && gst_structure_has_name (s, "audio/x-raw") && buf != NULL) {
GstTypeFindProbability prob;
GstCaps *tf_caps;
}
}
- gst_wavparse_create_sourcepad (wav);
- gst_pad_set_active (wav->srcpad, TRUE);
gst_pad_set_caps (wav->srcpad, wav->caps);
gst_caps_replace (&wav->caps, NULL);
- gst_element_add_pad (GST_ELEMENT_CAST (wav), wav->srcpad);
- gst_element_no_more_pads (GST_ELEMENT_CAST (wav));
-
- if (wav->close_segment) {
- GST_DEBUG_OBJECT (wav, "Send close segment event on newpad");
- gst_pad_push_event (wav->srcpad, wav->close_segment);
- wav->close_segment = NULL;
- }
if (wav->start_segment) {
GST_DEBUG_OBJECT (wav, "Send start segment event on newpad");
gst_pad_push_event (wav->srcpad, wav->start_segment);
}
if (wav->tags) {
- gst_element_found_tags_for_pad (GST_ELEMENT_CAST (wav), wav->srcpad,
- wav->tags);
+ gst_pad_push_event (wav->srcpad, gst_event_new_tag (wav->tags));
wav->tags = NULL;
}
}
* amounts of data regardless of the playback rate */
desired =
MIN (gst_guint64_to_gdouble (wav->dataleft),
- wav->max_buf_size * wav->segment.abs_rate);
+ wav->max_buf_size * ABS (wav->segment.rate));
if (desired >= wav->blockalign && wav->blockalign > 0)
desired -= (desired % wav->blockalign);
goto pull_error;
/* we may get a short buffer at the end of the file */
- if (GST_BUFFER_SIZE (buf) < desired) {
- GST_LOG_OBJECT (wav, "Got only %u bytes of data", GST_BUFFER_SIZE (buf));
- if (GST_BUFFER_SIZE (buf) >= wav->blockalign) {
- buf = gst_buffer_make_metadata_writable (buf);
- GST_BUFFER_SIZE (buf) -= (GST_BUFFER_SIZE (buf) % wav->blockalign);
+ if (gst_buffer_get_size (buf) < desired) {
+ gsize size = gst_buffer_get_size (buf);
+
+ GST_LOG_OBJECT (wav, "Got only %" G_GSIZE_FORMAT " bytes of data", size);
+ if (size >= wav->blockalign) {
+ buf = gst_buffer_make_writable (buf);
+ gst_buffer_resize (buf, 0, size - (size % wav->blockalign));
} else {
gst_buffer_unref (buf);
goto found_eos;
}
}
- obtained = GST_BUFFER_SIZE (buf);
+ obtained = gst_buffer_get_size (buf);
/* our positions in bytes */
pos = wav->offset - wav->datastart;
nextpos = pos + obtained;
/* update offsets, does not overflow. */
+ buf = gst_buffer_make_writable (buf);
GST_BUFFER_OFFSET (buf) = pos / wav->bytes_per_sample;
GST_BUFFER_OFFSET_END (buf) = nextpos / wav->bytes_per_sample;
/* this will also push the segment events */
gst_wavparse_add_src_pad (wav, buf);
} else {
- /* If we have a pending close/start segment, send it now. */
- if (G_UNLIKELY (wav->close_segment != NULL)) {
- gst_pad_push_event (wav->srcpad, wav->close_segment);
- wav->close_segment = NULL;
- }
+ /* If we have a pending start segment, send it now. */
if (G_UNLIKELY (wav->start_segment != NULL)) {
gst_pad_push_event (wav->srcpad, wav->start_segment);
wav->start_segment = NULL;
/* update current running segment position */
if (G_LIKELY (next_timestamp >= wav->segment.start))
- gst_segment_set_last_stop (&wav->segment, GST_FORMAT_TIME,
- next_timestamp);
+ wav->segment.position = next_timestamp;
} else if (wav->fact) {
guint64 bps =
gst_util_uint64_scale_int (wav->datasize, wav->rate, wav->fact);
duration = GST_CLOCK_TIME_NONE;
/* update current running segment position with byte offset */
if (G_LIKELY (nextpos >= wav->segment.start))
- gst_segment_set_last_stop (&wav->segment, GST_FORMAT_BYTES, nextpos);
+ wav->segment.position = nextpos;
}
if ((pos > 0) && wav->vbr) {
/* don't set timestamps for VBR files if it's not the first buffer */
GST_BUFFER_TIMESTAMP (buf) = timestamp;
GST_BUFFER_DURATION (buf) = duration;
- /* don't forget to set the caps on the buffer */
- gst_buffer_set_caps (buf, GST_PAD_CAPS (wav->srcpad));
-
GST_LOG_OBJECT (wav,
"Got buffer. timestamp:%" GST_TIME_FORMAT " , duration:%" GST_TIME_FORMAT
- ", size:%u", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration),
- GST_BUFFER_SIZE (buf));
+ ", size:%" G_GSIZE_FORMAT, GST_TIME_ARGS (timestamp),
+ GST_TIME_ARGS (duration), gst_buffer_get_size (buf));
if ((res = gst_pad_push (wav->srcpad, buf)) != GST_FLOW_OK)
goto push_error;
found_eos:
{
GST_DEBUG_OBJECT (wav, "found EOS");
- return GST_FLOW_UNEXPECTED;
+ return GST_FLOW_EOS;
}
pull_error:
{
/* check if we got EOS */
- if (res == GST_FLOW_UNEXPECTED)
+ if (res == GST_FLOW_EOS)
goto found_eos;
GST_WARNING_OBJECT (wav,
const gchar *reason = gst_flow_get_name (ret);
GST_DEBUG_OBJECT (wav, "pausing task, reason %s", reason);
- wav->segment_running = FALSE;
gst_pad_pause_task (pad);
- if (ret == GST_FLOW_UNEXPECTED) {
+ if (ret == GST_FLOW_EOS) {
+ /* handle end-of-stream/segment */
+ /* so align our position with the end of it, if there is one
+ * this ensures a subsequent will arrive at correct base/acc time */
+ if (wav->segment.format == GST_FORMAT_TIME) {
+ if (wav->segment.rate > 0.0 &&
+ GST_CLOCK_TIME_IS_VALID (wav->segment.stop))
+ wav->segment.position = wav->segment.stop;
+ else if (wav->segment.rate < 0.0)
+ wav->segment.position = wav->segment.start;
+ }
/* add pad before we perform EOS */
if (G_UNLIKELY (wav->first)) {
wav->first = FALSE;
gst_message_new_segment_done (GST_OBJECT_CAST (wav),
wav->segment.format, stop));
} else {
- if (wav->srcpad != NULL)
- gst_pad_push_event (wav->srcpad, gst_event_new_eos ());
+ gst_pad_push_event (wav->srcpad, gst_event_new_eos ());
}
- } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
/* for fatal errors we post an error message, post the error
* first so the app knows about the error first. */
GST_ELEMENT_ERROR (wav, STREAM, FAILED,
(_("Internal data flow error.")),
("streaming task paused, reason %s (%d)", reason, ret));
- if (wav->srcpad != NULL)
- gst_pad_push_event (wav->srcpad, gst_event_new_eos ());
+ gst_pad_push_event (wav->srcpad, gst_event_new_eos ());
}
return;
}
}
static GstFlowReturn
-gst_wavparse_chain (GstPad * pad, GstBuffer * buf)
+gst_wavparse_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstFlowReturn ret;
- GstWavParse *wav = GST_WAVPARSE (GST_PAD_PARENT (pad));
+ GstWavParse *wav = GST_WAVPARSE (parent);
- GST_LOG_OBJECT (wav, "adapter_push %u bytes", GST_BUFFER_SIZE (buf));
+ GST_LOG_OBJECT (wav, "adapter_push %" G_GSIZE_FORMAT " bytes",
+ gst_buffer_get_size (buf));
gst_adapter_push (wav->adapter, buf);
}
static gboolean
-gst_wavparse_sink_event (GstPad * pad, GstEvent * event)
+gst_wavparse_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
- GstWavParse *wav = GST_WAVPARSE (GST_PAD_PARENT (pad));
+ GstWavParse *wav = GST_WAVPARSE (parent);
gboolean ret = TRUE;
GST_LOG_OBJECT (wav, "handling %s event", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_CAPS:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time, offset = 0, end_offset = -1;
- gboolean update;
+ /* discard, we'll come up with proper src caps */
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ gint64 start, stop, offset = 0, end_offset = -1;
GstSegment segment;
/* some debug output */
- gst_segment_init (&segment, GST_FORMAT_UNDEFINED);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
- gst_segment_set_newsegment_full (&segment, update, rate, arate, format,
- start, stop, time);
- GST_DEBUG_OBJECT (wav,
- "received format %d newsegment %" GST_SEGMENT_FORMAT, format,
+ gst_event_copy_segment (event, &segment);
+ GST_DEBUG_OBJECT (wav, "received newsegment %" GST_SEGMENT_FORMAT,
&segment);
if (wav->state != GST_WAVPARSE_DATA) {
/* now we are either committed to TIME or BYTE format,
* and we only expect a BYTE segment, e.g. following a seek */
- if (format == GST_FORMAT_BYTES) {
+ if (segment.format == GST_FORMAT_BYTES) {
+ /* handle (un)signed issues */
+ start = segment.start;
+ stop = segment.stop;
if (start > 0) {
offset = start;
start -= wav->datastart;
}
if (stop > 0) {
end_offset = stop;
- stop -= wav->datastart;
- stop = MAX (stop, 0);
+ segment.stop -= wav->datastart;
+ segment.stop = MAX (stop, 0);
}
if (wav->segment.format == GST_FORMAT_TIME) {
guint64 bps = wav->bps;
goto exit;
}
+ segment.start = start;
+ segment.stop = stop;
+
/* accept upstream's notion of segment and distribute along */
- gst_segment_set_newsegment_full (&wav->segment, update, rate, arate,
- wav->segment.format, start, stop, start);
+ segment.time = segment.start = segment.position;
+ segment.duration = wav->segment.duration;
+ segment.base = gst_segment_to_running_time (&wav->segment,
+ GST_FORMAT_TIME, wav->segment.position);
+
+ gst_segment_copy_into (&segment, &wav->segment);
+
/* also store the newsegment event for the streaming thread */
if (wav->start_segment)
gst_event_unref (wav->start_segment);
- wav->start_segment =
- gst_event_new_new_segment_full (update, rate, arate,
- wav->segment.format, start, stop, start);
- GST_DEBUG_OBJECT (wav, "Pushing newseg update %d, rate %g, "
- "applied rate %g, format %d, start %" G_GINT64_FORMAT ", "
- "stop %" G_GINT64_FORMAT, update, rate, arate, wav->segment.format,
- start, stop);
+ GST_DEBUG_OBJECT (wav, "Storing newseg %" GST_SEGMENT_FORMAT, &segment);
+ wav->start_segment = gst_event_new_segment (&segment);
/* stream leftover data in current segment */
gst_wavparse_flush_data (wav);
/* fall-through */
case GST_EVENT_FLUSH_STOP:
+ {
+ GstClockTime dur;
+
gst_adapter_clear (wav->adapter);
wav->discont = TRUE;
+ dur = wav->segment.duration;
+ gst_segment_init (&wav->segment, wav->segment.format);
+ wav->segment.duration = dur;
/* fall-through */
+ }
default:
- ret = gst_pad_event_default (wav->sinkpad, event);
+ ret = gst_pad_event_default (wav->sinkpad, parent, event);
break;
}
}
}
-static const GstQueryType *
-gst_wavparse_get_query_types (GstPad * pad)
-{
- static const GstQueryType types[] = {
- GST_QUERY_POSITION,
- GST_QUERY_DURATION,
- GST_QUERY_CONVERT,
- GST_QUERY_SEEKING,
- 0
- };
-
- return types;
-}
-
/* handle queries for location and length in requested format */
static gboolean
-gst_wavparse_pad_query (GstPad * pad, GstQuery * query)
+gst_wavparse_pad_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
gboolean res = TRUE;
- GstWavParse *wav = GST_WAVPARSE (gst_pad_get_parent (pad));
+ GstWavParse *wav = GST_WAVPARSE (parent);
/* only if we know */
if (wav->state != GST_WAVPARSE_DATA) {
- gst_object_unref (wav);
return FALSE;
}
break;
}
default:
- res = gst_pad_query_default (pad, query);
+ res = gst_pad_query_default (pad, parent, query);
break;
}
- gst_object_unref (wav);
return res;
}
static gboolean
-gst_wavparse_srcpad_event (GstPad * pad, GstEvent * event)
+gst_wavparse_srcpad_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
- GstWavParse *wavparse = GST_WAVPARSE (gst_pad_get_parent (pad));
+ GstWavParse *wavparse = GST_WAVPARSE (parent);
gboolean res = FALSE;
GST_DEBUG_OBJECT (wavparse, "%s event", GST_EVENT_TYPE_NAME (event));
res = gst_pad_push_event (wavparse->sinkpad, event);
break;
}
- gst_object_unref (wavparse);
return res;
}
static gboolean
-gst_wavparse_sink_activate (GstPad * sinkpad)
+gst_wavparse_sink_activate (GstPad * sinkpad, GstObject * parent)
{
- GstWavParse *wav = GST_WAVPARSE (gst_pad_get_parent (sinkpad));
- gboolean res;
+ GstWavParse *wav = GST_WAVPARSE (parent);
+ GstQuery *query;
+ gboolean pull_mode;
if (wav->adapter) {
gst_adapter_clear (wav->adapter);
wav->adapter = NULL;
}
- if (gst_pad_check_pull_range (sinkpad)) {
- GST_DEBUG ("going to pull mode");
- wav->streaming = FALSE;
- res = gst_pad_activate_pull (sinkpad, TRUE);
- } else {
- GST_DEBUG ("going to push (streaming) mode");
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode (query, GST_PAD_MODE_PULL);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ wav->streaming = FALSE;
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
wav->streaming = TRUE;
wav->adapter = gst_adapter_new ();
- res = gst_pad_activate_push (sinkpad, TRUE);
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
}
- gst_object_unref (wav);
- return res;
}
static gboolean
-gst_wavparse_sink_activate_pull (GstPad * sinkpad, gboolean active)
+gst_wavparse_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
{
- GstWavParse *wav = GST_WAVPARSE (GST_OBJECT_PARENT (sinkpad));
+ gboolean res;
- if (active) {
- /* if we have a scheduler we can start the task */
- wav->segment_running = TRUE;
- return gst_pad_start_task (sinkpad, (GstTaskFunction) gst_wavparse_loop,
- sinkpad);
- } else {
- wav->segment_running = FALSE;
- return gst_pad_stop_task (sinkpad);
+ switch (mode) {
+ case GST_PAD_MODE_PUSH:
+ res = TRUE;
+ break;
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ /* if we have a scheduler we can start the task */
+ res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_wavparse_loop,
+ sinkpad);
+ } else {
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
}
-};
+ return res;
+}
static GstStateChangeReturn
gst_wavparse_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_wavparse_destroy_sourcepad (wav);
gst_wavparse_reset (wav);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
* the format for sure */
GstCaps *caps;
GstTagList *tags;
- GstEvent *close_segment;
GstEvent *start_segment;
/* WAVE decoding state */
gboolean got_fmt;
gboolean streaming;
- /* configured segment, start/stop expressed in time */
+ /* configured segment, start/stop expressed in time or bytes */
GstSegment segment;
- gboolean segment_running;
/* for late pad configuration */
gboolean first;
libgsty4menc_la_SOURCES = gsty4mencode.c
libgsty4menc_la_CFLAGS = $(GST_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
-libgsty4menc_la_LIBADD = $(GST_LIBS)
+libgsty4menc_la_LIBADD = $(GST_LIBS) -lgstvideo-$(GST_MAJORMINOR)
libgsty4menc_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgsty4menc_la_LIBTOOLFLAGS = --tag=disable-static
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ IYUV, I420, Y42B, Y41B, Y444 }"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ IYUV, I420, Y42B, Y41B, Y444 }"))
);
static void gst_y4m_encode_reset (GstY4mEncode * filter);
-static gboolean gst_y4m_encode_setcaps (GstPad * pad, GstCaps * vscaps);
-static GstFlowReturn gst_y4m_encode_chain (GstPad * pad, GstBuffer * buf);
+static gboolean gst_y4m_encode_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_y4m_encode_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
static GstStateChangeReturn gst_y4m_encode_change_state (GstElement * element,
GstStateChange transition);
-GST_BOILERPLATE (GstY4mEncode, gst_y4m_encode, GstElement, GST_TYPE_ELEMENT);
-
-
-static void
-gst_y4m_encode_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (element_class,
- &y4mencode_src_factory);
- gst_element_class_add_static_pad_template (element_class,
- &y4mencode_sink_factory);
- gst_element_class_set_details_simple (element_class, "YUV4MPEG video encoder",
- "Codec/Encoder/Video",
- "Encodes a YUV frame into the yuv4mpeg format (mjpegtools)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_y4m_encode_parent_class parent_class
+G_DEFINE_TYPE (GstY4mEncode, gst_y4m_encode, GST_TYPE_ELEMENT);
static void
gst_y4m_encode_class_init (GstY4mEncodeClass * klass)
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
+ gobject_class->set_property = gst_y4m_encode_set_property;
+ gobject_class->get_property = gst_y4m_encode_get_property;
+
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_y4m_encode_change_state);
- gobject_class->set_property = gst_y4m_encode_set_property;
- gobject_class->get_property = gst_y4m_encode_get_property;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&y4mencode_src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&y4mencode_sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "YUV4MPEG video encoder", "Codec/Encoder/Video",
+ "Encodes a YUV frame into the yuv4mpeg format (mjpegtools)",
+ "Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_y4m_encode_init (GstY4mEncode * filter, GstY4mEncodeClass * klass)
+gst_y4m_encode_init (GstY4mEncode * filter)
{
filter->sinkpad =
gst_pad_new_from_static_template (&y4mencode_sink_factory, "sink");
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
gst_pad_set_chain_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_y4m_encode_chain));
- gst_pad_set_setcaps_function (filter->sinkpad,
- GST_DEBUG_FUNCPTR (gst_y4m_encode_setcaps));
+ gst_pad_set_event_function (filter->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_y4m_encode_sink_event));
filter->srcpad =
gst_pad_new_from_static_template (&y4mencode_src_factory, "src");
static void
gst_y4m_encode_reset (GstY4mEncode * filter)
{
- filter->width = filter->height = -1;
- filter->fps_num = filter->fps_den = 1;
- filter->par_num = filter->par_den = 1;
- filter->colorspace = "unknown";
+ filter->negotiated = FALSE;
}
static gboolean
gst_y4m_encode_setcaps (GstPad * pad, GstCaps * vscaps)
{
+ gboolean ret;
GstY4mEncode *filter;
- GstStructure *structure;
- gboolean res;
- gint w, h;
- guint32 fourcc;
- const GValue *fps, *par, *interlaced;
+ GstVideoInfo info;
filter = GST_Y4M_ENCODE (GST_PAD_PARENT (pad));
- structure = gst_caps_get_structure (vscaps, 0);
-
- res = gst_structure_get_int (structure, "width", &w);
- res &= gst_structure_get_int (structure, "height", &h);
- res &= ((fps = gst_structure_get_value (structure, "framerate")) != NULL);
- res &= gst_structure_get_fourcc (structure, "format", &fourcc);
+ if (!gst_video_info_from_caps (&info, vscaps))
+ goto invalid_format;
- switch (fourcc) { /* Translate fourcc to Y4M colorspace code */
- case GST_MAKE_FOURCC ('I', '4', '2', '0'):
- case GST_MAKE_FOURCC ('I', 'Y', 'U', 'V'):
+ switch (GST_VIDEO_INFO_FORMAT (&info)) {
+ case GST_VIDEO_FORMAT_I420:
filter->colorspace = "420";
break;
- case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
+ case GST_VIDEO_FORMAT_Y42B:
filter->colorspace = "422";
break;
- case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
+ case GST_VIDEO_FORMAT_Y41B:
filter->colorspace = "411";
break;
- case GST_MAKE_FOURCC ('Y', '4', '4', '4'):
+ case GST_VIDEO_FORMAT_Y444:
filter->colorspace = "444";
break;
default:
- res = FALSE;
- break;
+ goto invalid_format;
}
- if (!res || w <= 0 || h <= 0 || !GST_VALUE_HOLDS_FRACTION (fps))
- return FALSE;
+ filter->info = info;
- /* optional interlaced info */
- interlaced = gst_structure_get_value (structure, "interlaced");
-
- /* optional par info */
- par = gst_structure_get_value (structure, "pixel-aspect-ratio");
-
- filter->width = w;
- filter->height = h;
- filter->fps_num = gst_value_get_fraction_numerator (fps);
- filter->fps_den = gst_value_get_fraction_denominator (fps);
- if ((par != NULL) && GST_VALUE_HOLDS_FRACTION (par)) {
- filter->par_num = gst_value_get_fraction_numerator (par);
- filter->par_den = gst_value_get_fraction_denominator (par);
- } else { /* indicates unknown */
- filter->par_num = 0;
- filter->par_den = 0;
- }
- if ((interlaced != NULL) && G_VALUE_HOLDS (interlaced, G_TYPE_BOOLEAN)) {
- filter->interlaced = g_value_get_boolean (interlaced);
- } else {
- /* assume progressive if no interlaced property in caps */
- filter->interlaced = FALSE;
- }
/* the template caps will do for the src pad, should always accept */
- return gst_pad_set_caps (filter->srcpad,
+ ret = gst_pad_set_caps (filter->srcpad,
gst_static_pad_template_get_caps (&y4mencode_src_factory));
+
+ filter->negotiated = ret;
+
+ return ret;
+
+ /* ERRORS */
+invalid_format:
+ {
+ GST_ERROR_OBJECT (filter, "got invalid caps");
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_y4m_encode_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ gboolean res;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ res = gst_y4m_encode_setcaps (pad, caps);
+ gst_event_unref (event);
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+ return res;
}
static inline GstBuffer *
-gst_y4m_encode_get_stream_header (GstY4mEncode * filter)
+gst_y4m_encode_get_stream_header (GstY4mEncode * filter, gboolean tff)
{
gpointer header;
GstBuffer *buf;
gchar interlaced;
+ gsize len;
- interlaced = 'p';
-
- if (filter->interlaced && filter->top_field_first)
- interlaced = 't';
- else if (filter->interlaced)
- interlaced = 'b';
+ if (filter->info.flags & GST_VIDEO_FLAG_INTERLACED) {
+ if (tff)
+ interlaced = 't';
+ else
+ interlaced = 'b';
+ } else {
+ interlaced = 'p';
+ }
header = g_strdup_printf ("YUV4MPEG2 C%s W%d H%d I%c F%d:%d A%d:%d\n",
- filter->colorspace, filter->width, filter->height, interlaced,
- filter->fps_num, filter->fps_den, filter->par_num, filter->par_den);
+ filter->colorspace, GST_VIDEO_INFO_WIDTH (&filter->info),
+ GST_VIDEO_INFO_HEIGHT (&filter->info), interlaced,
+ GST_VIDEO_INFO_FPS_N (&filter->info),
+ GST_VIDEO_INFO_FPS_D (&filter->info),
+ GST_VIDEO_INFO_PAR_N (&filter->info),
+ GST_VIDEO_INFO_PAR_D (&filter->info));
+ len = strlen (header);
buf = gst_buffer_new ();
- gst_buffer_set_data (buf, header, strlen (header));
- /* so it gets free'd when needed */
- GST_BUFFER_MALLOCDATA (buf) = header;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, header, g_free, len, 0, len));
return buf;
}
{
gpointer header;
GstBuffer *buf;
+ gsize len;
header = g_strdup_printf ("FRAME\n");
+ len = strlen (header);
buf = gst_buffer_new ();
- gst_buffer_set_data (buf, header, strlen (header));
- /* so it gets free'd when needed */
- GST_BUFFER_MALLOCDATA (buf) = header;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, header, g_free, len, 0, len));
return buf;
}
static GstFlowReturn
-gst_y4m_encode_chain (GstPad * pad, GstBuffer * buf)
+gst_y4m_encode_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
- GstY4mEncode *filter = GST_Y4M_ENCODE (GST_PAD_PARENT (pad));
+ GstY4mEncode *filter = GST_Y4M_ENCODE (parent);
GstBuffer *outbuf;
GstClockTime timestamp;
/* check we got some decent info from caps */
- if (filter->width < 0) {
- GST_ELEMENT_ERROR ("filter", CORE, NEGOTIATION, (NULL),
- ("format wasn't negotiated before chain function"));
- gst_buffer_unref (buf);
- return GST_FLOW_NOT_NEGOTIATED;
- }
+ if (GST_VIDEO_INFO_FORMAT (&filter->info) == GST_VIDEO_FORMAT_UNKNOWN)
+ goto not_negotiated;
timestamp = GST_BUFFER_TIMESTAMP (buf);
if (G_UNLIKELY (!filter->header)) {
- if (filter->interlaced == TRUE) {
- if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_TFF)) {
- filter->top_field_first = TRUE;
- } else {
- filter->top_field_first = FALSE;
- }
+ gboolean tff;
+
+ if (filter->info.flags & GST_VIDEO_FLAG_INTERLACED) {
+ tff = GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_TFF);
}
- outbuf = gst_y4m_encode_get_stream_header (filter);
+ outbuf = gst_y4m_encode_get_stream_header (filter, tff);
filter->header = TRUE;
outbuf = gst_buffer_join (outbuf, gst_y4m_encode_get_frame_header (filter));
} else {
outbuf = gst_y4m_encode_get_frame_header (filter);
}
- /* join with data */
+ /* join with data, FIXME, strides are all wrong etc */
outbuf = gst_buffer_join (outbuf, buf);
/* decorate */
- gst_buffer_make_metadata_writable (outbuf);
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (filter->srcpad));
+ outbuf = gst_buffer_make_writable (outbuf);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
return gst_pad_push (filter->srcpad, outbuf);
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ELEMENT_ERROR (filter, CORE, NEGOTIATION, (NULL),
+ ("format wasn't negotiated before chain function"));
+ gst_buffer_unref (buf);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
}
static void
GstPad *sinkpad,*srcpad;
/* caps information */
- gint width, height;
- gint fps_num, fps_den;
- gint par_num, par_den;
- gboolean interlaced;
- gboolean top_field_first;
+ GstVideoInfo info;
+ gboolean negotiated;
+
const gchar *colorspace;
/* state information */
gboolean header;
EXTRA_DIST = \
a52.m4 \
aalib.m4 \
- as-arts.m4 \
as-ffmpeg.m4 \
as-liblame.m4 \
as-slurp-ffmpeg.m4 \
check-libheader.m4 \
codeset.m4 \
- esd.m4 \
freetype2.m4 \
- gconf-2.m4 \
gettext.m4 \
glibc21.m4 \
glib.m4 \
+++ /dev/null
-dnl as-arts.m4 0.1.0
-
-dnl $Id: as-arts.m4,v 1.5 2004/05/21 11:20:49 thomasvs Exp $
-dnl if you copy this file to your cvs,
-dnl add this file using cvs -ko add to retain this header
-
-dnl This is an example arts .m4 adapted and scrubbed by thomasvs
-
-# Configure paths for ARTS
-# Philip Stadermann 2001-06-21
-# stolen from esd.m4
-
-dnl AM_PATH_ARTS([MINIMUM-VERSION, [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]])
-dnl Test for ARTS, and define ARTS_CXXFLAGS and ARTS_LIBS
-dnl
-AC_DEFUN([AM_PATH_ARTS],
-[dnl
-dnl Get the cflags and libraries from the artsc-config script
-dnl
-AC_ARG_WITH(arts-prefix,
- AC_HELP_STRING([--with-arts-prefix=PFX],
- [prefix where ARTS is installed (optional)]),
- arts_prefix="$withval", arts_prefix="")
-
-AC_ARG_WITH(arts-exec-prefix,
- AC_HELP_STRING([--with-arts-exec-prefix=PFX],
- [exec prefix where ARTS is installed (optional)]),
- arts_exec_prefix="$withval", arts_exec_prefix="")
-
-AC_ARG_ENABLE(artstest,
- AC_HELP_STRING([--disable-artstest],
- [do not try to compile and run a test ARTS program]),
- , enable_artstest=yes)
-
- if test x$arts_exec_prefix != x ; then
- arts_args="$arts_args --exec-prefix=$arts_exec_prefix"
- if test x${ARTS_CONFIG+set} != xset ; then
- ARTS_CONFIG=$arts_exec_prefix/bin/artsc-config
- fi
- fi
- if test x$arts_prefix != x ; then
- arts_args="$arts_args --prefix=$arts_prefix"
- if test x${ARTS_CONFIG+set} != xset ; then
- ARTS_CONFIG=$arts_prefix/bin/artsc-config
- fi
- fi
-
- AC_PATH_PROG(ARTS_CONFIG, artsc-config, no)
- min_arts_version=ifelse([$1], ,0.9.5,$1)
- AC_MSG_CHECKING(for ARTS artsc - version >= $min_arts_version)
- no_arts=""
- if test "$ARTS_CONFIG" = "no" ; then
- no_arts=yes
- else
- # FIXME: thomas added this sed to get arts path instead of artsc
- # replace -I.../artsc with -I.../arts
- ARTS_CXXFLAGS=`$ARTS_CONFIG $artsconf_args --cflags | artsc-config --cflags | sed 's/\(-I.*\)artsc/\1arts/'`
- ARTS_LIBS=`$ARTS_CONFIG $artsconf_args --libs | sed 's/artsc$/arts/'`
-
- arts_major_version=`$ARTS_CONFIG $arts_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\1/'`
- arts_minor_version=`$ARTS_CONFIG $arts_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\2/'`
- arts_micro_version=`$ARTS_CONFIG $arts_config_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\3/'`
- if test "x$enable_artstest" = "xyes" ; then
- dnl ac_save_CXXFLAGS="$CXXFLAGS"
- dnl ac_save_LIBS="$LIBS"
- dnl CFLAGS="$CFLAGS $ARTS_CXXFLAGS"
- dnl LIBS="$LIBS $ARTS_LIBS"
-dnl
-dnl Now check if the installed ARTS is sufficiently new. (Also sanity
-dnl checks the results of artsc-config to some extent)
-dnl
-
-dnl a*s: to successfully compile the C++ test app, we need to
-dnl first make sure we're going to compile it as C++ (with AC_LANG_PUSH),
-dnl then add the CFLAGS and CLIBS of arts which we just discovered to the
-dnl C++ compilation and linking flags.
-dnl We also need to clean up after the test; this means using AC_LANG_POP
-dnl and restoring the CPPFLAGS and LDFLAGS from the saved values we take
-dnl here.
-
-dnl ask nicely for C++ compilation
- AC_LANG_PUSH(C++)
-
-dnl save compilation and link flags and make our own
- ac_save_CPPFLAGS="$CPPFLAGS"
- ac_save_LDFLAGS="$LDFLAGS"
- AC_SUBST(CPPFLAGS,"$CPPFLAGS $ARTS_CXXFLAGS")
- AC_SUBST(LDFLAGS,"$LDFLAGS $ARTS_CLIBS")
-
- rm -f conf.artstest
- AC_TRY_RUN([
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <artsflow.h>
-
-char*
-my_strdup (char *str)
-{
- char *new_str;
-
- if (str)
- {
- // thomas: the original test did not have the typecast
- new_str = (char *) malloc ((strlen (str) + 1) * sizeof(char));
- strcpy (new_str, str);
- }
- else
- new_str = NULL;
-
- return new_str;
-}
-
-int main ()
-{
- int major, minor, micro;
- char *tmp_version;
-
- system ("touch conf.artstest");
-
- /* HP/UX 9 (%@#!) writes to sscanf strings */
- tmp_version = my_strdup("$min_arts_version");
- if (sscanf(tmp_version, "%d.%d.%d", &major, &minor, µ) != 3) {
- printf("%s, bad version string\n", "$min_arts_version");
- exit(1);
- }
-
- if (($arts_major_version > major) ||
- (($arts_major_version == major) && ($arts_minor_version > minor)) ||
- (($arts_major_version == major) && ($arts_minor_version == minor) && ($arts_micro_version >= micro)))
- {
- return 0;
- }
- else
- {
- printf("\n*** 'artsc-config --version' returned %d.%d.%d, but the minimum version\n", $arts_major_version, $arts_minor_version, $arts_micro_version);
- printf("*** of ARTS required is %d.%d.%d. If artsc-config is correct, then it is\n", major, minor, micro);
- printf("*** best to upgrade to the required version.\n");
- printf("*** If artsc-config was wrong, set the environment variable ARTS_CONFIG\n");
- printf("*** to point to the correct copy of artsc-config, and remove the file\n");
- printf("*** config.cache before re-running configure\n");
- return 1;
- }
-}
-
-],, no_arts=yes,[echo $ac_n "cross compiling; assumed OK... $ac_c"])
- dnl CFLAGS="$ac_save_CFLAGS"
- dnl LIBS="$ac_save_LIBS"
- dnl a*s this is were we clean up after the test
- AC_LANG_POP(C++)
- CXXFLAGS="$ac_save_CXXFLAGS"
- LDFLAGS="$ac_save_LDFLAGS"
- dnl a*s we are sure that these are right, so we make them active
- AC_SUBST(CXXFLAGS,"$CXXFLAGS")
- AC_SUBST(LDFLAGS,"$LDFLAGS")
- fi
- fi
- if test "x$no_arts" = x ; then
- AC_MSG_RESULT(yes)
- ifelse([$2], , :, [$2])
- else
- AC_MSG_RESULT(no)
- if test "$ARTS_CONFIG" = "no" ; then
- echo "*** The artsc-config script installed by ARTS could not be found"
- echo "*** If ARTS was installed in PREFIX, make sure PREFIX/bin is in"
- echo "*** your path, or set the ARTS_CONFIG environment variable to the"
- echo "*** full path to artsc-config."
- else
- if test -f conf.artstest ; then
- :
- else
- echo "*** Could not run ARTS test program, checking why..."
- CFLAGS="$CFLAGS $ARTS_CXXFLAGS"
- LIBS="$LIBS $ARTS_LIBS"
- AC_TRY_LINK([
-#include <stdio.h>
-#include <artsflow.h>
-], [ return 0; ],
- [ echo "*** The test program compiled, but did not run. This usually means"
- echo "*** that the run-time linker is not finding ARTS or finding the wrong"
- echo "*** version of ARTS. If it is not finding ARTS, you'll need to set your"
- echo "*** LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf to point"
- echo "*** to the installed location Also, make sure you have run ldconfig if that"
- echo "*** is required on your system"
- echo "***"
- echo "*** If you have an old version installed, it is best to remove it, although"
- echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
- [ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means ARTS was incorrectly installed"
- echo "*** or that you have moved ARTS since it was installed. In the latter case, you"
- echo "*** may want to edit the artsc-config script: $ARTS_CONFIG" ])
- CFLAGS="$ac_save_CFLAGS"
- LIBS="$ac_save_LIBS"
- fi
- fi
- ARTS_CXXFLAGS=""
- ARTS_LIBS=""
- ifelse([$3], , :, [$3])
- fi
- AC_SUBST(ARTS_CXXFLAGS)
- AC_SUBST(ARTS_LIBS)
- rm -f conf.artstest
-])
-
-dnl release C++ question
-
+++ /dev/null
-# Configure paths for ESD
-# Manish Singh 98-9-30
-# stolen back from Frank Belew
-# stolen from Manish Singh
-# Shamelessly stolen from Owen Taylor
-
-dnl AM_PATH_ESD([MINIMUM-VERSION, [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]])
-dnl Test for ESD, and define ESD_CFLAGS and ESD_LIBS
-dnl
-AC_DEFUN([AM_PATH_ESD],
-[dnl
-dnl Get the cflags and libraries from the esd-config script
-dnl
-AC_ARG_WITH(esd-prefix,
- AC_HELP_STRING([--with-esd-prefix=PFX],
- [prefix where ESD is installed (optional)]),
- esd_prefix="$withval", esd_prefix="")
-
-AC_ARG_WITH(esd-exec-prefix,
- AC_HELP_STRING([--with-esd-exec-prefix=PFX],
- [exec prefix where ESD is installed (optional)]),
- esd_exec_prefix="$withval", esd_exec_prefix="")
-
-AC_ARG_ENABLE(esdtest,
- AC_HELP_STRING([--disable-esdtest],
- [do not try to compile and run a test ESD program]),
- , enable_esdtest=yes)
-
- if test x$esd_exec_prefix != x ; then
- esd_args="$esd_args --exec-prefix=$esd_exec_prefix"
- if test x${ESD_CONFIG+set} != xset ; then
- ESD_CONFIG=$esd_exec_prefix/bin/esd-config
- fi
- fi
- if test x$esd_prefix != x ; then
- esd_args="$esd_args --prefix=$esd_prefix"
- if test x${ESD_CONFIG+set} != xset ; then
- ESD_CONFIG=$esd_prefix/bin/esd-config
- fi
- fi
-
- AC_PATH_PROG(ESD_CONFIG, esd-config, no)
- min_esd_version=ifelse([$1], ,0.2.7,$1)
- AC_MSG_CHECKING(for ESD - version >= $min_esd_version)
- no_esd=""
- if test "$ESD_CONFIG" = "no" ; then
- no_esd=yes
- else
- AC_LANG_SAVE
- AC_LANG_C
- ESD_CFLAGS=`$ESD_CONFIG $esdconf_args --cflags`
- ESD_LIBS=`$ESD_CONFIG $esdconf_args --libs`
-
- esd_major_version=`$ESD_CONFIG $esd_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\1/'`
- esd_minor_version=`$ESD_CONFIG $esd_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\2/'`
- esd_micro_version=`$ESD_CONFIG $esd_config_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\3/'`
- if test "x$enable_esdtest" = "xyes" ; then
- ac_save_CFLAGS="$CFLAGS"
- ac_save_LIBS="$LIBS"
- CFLAGS="$CFLAGS $ESD_CFLAGS"
- LIBS="$LIBS $ESD_LIBS"
-dnl
-dnl Now check if the installed ESD is sufficiently new. (Also sanity
-dnl checks the results of esd-config to some extent
-dnl
- rm -f conf.esdtest
- AC_TRY_RUN([
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <esd.h>
-
-char*
-my_strdup (char *str)
-{
- char *new_str;
-
- if (str)
- {
- new_str = malloc ((strlen (str) + 1) * sizeof(char));
- strcpy (new_str, str);
- }
- else
- new_str = NULL;
-
- return new_str;
-}
-
-int main ()
-{
- int major, minor, micro;
- char *tmp_version;
-
- system ("touch conf.esdtest");
-
- /* HP/UX 9 (%@#!) writes to sscanf strings */
- tmp_version = my_strdup("$min_esd_version");
- if (sscanf(tmp_version, "%d.%d.%d", &major, &minor, µ) != 3) {
- printf("%s, bad version string\n", "$min_esd_version");
- exit(1);
- }
-
- if (($esd_major_version > major) ||
- (($esd_major_version == major) && ($esd_minor_version > minor)) ||
- (($esd_major_version == major) && ($esd_minor_version == minor) && ($esd_micro_version >= micro)))
- {
- return 0;
- }
- else
- {
- printf("\n*** 'esd-config --version' returned %d.%d.%d, but the minimum version\n", $esd_major_version, $esd_minor_version, $esd_micro_version);
- printf("*** of ESD required is %d.%d.%d. If esd-config is correct, then it is\n", major, minor, micro);
- printf("*** best to upgrade to the required version.\n");
- printf("*** If esd-config was wrong, set the environment variable ESD_CONFIG\n");
- printf("*** to point to the correct copy of esd-config, and remove the file\n");
- printf("*** config.cache before re-running configure\n");
- return 1;
- }
-}
-
-],, no_esd=yes,[echo $ac_n "cross compiling; assumed OK... $ac_c"])
- CFLAGS="$ac_save_CFLAGS"
- LIBS="$ac_save_LIBS"
- AC_LANG_RESTORE
- fi
- fi
- if test "x$no_esd" = x ; then
- AC_MSG_RESULT(yes)
- ifelse([$2], , :, [$2])
- else
- AC_MSG_RESULT(no)
- if test "$ESD_CONFIG" = "no" ; then
- echo "*** The esd-config script installed by ESD could not be found"
- echo "*** If ESD was installed in PREFIX, make sure PREFIX/bin is in"
- echo "*** your path, or set the ESD_CONFIG environment variable to the"
- echo "*** full path to esd-config."
- else
- if test -f conf.esdtest ; then
- :
- else
- echo "*** Could not run ESD test program, checking why..."
- CFLAGS="$CFLAGS $ESD_CFLAGS"
- LIBS="$LIBS $ESD_LIBS"
- AC_LANG_SAVE
- AC_LANG_C
- AC_TRY_LINK([
-#include <stdio.h>
-#include <esd.h>
-], [ return 0; ],
- [ echo "*** The test program compiled, but did not run. This usually means"
- echo "*** that the run-time linker is not finding ESD or finding the wrong"
- echo "*** version of ESD. If it is not finding ESD, you'll need to set your"
- echo "*** LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf to point"
- echo "*** to the installed location Also, make sure you have run ldconfig if that"
- echo "*** is required on your system"
- echo "***"
- echo "*** If you have an old version installed, it is best to remove it, although"
- echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
- [ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means ESD was incorrectly installed"
- echo "*** or that you have moved ESD since it was installed. In the latter case, you"
- echo "*** may want to edit the esd-config script: $ESD_CONFIG" ])
- CFLAGS="$ac_save_CFLAGS"
- LIBS="$ac_save_LIBS"
- AC_LANG_RESTORE
- fi
- fi
- ESD_CFLAGS=""
- ESD_LIBS=""
- ifelse([$3], , :, [$3])
- fi
- AC_SUBST(ESD_CFLAGS)
- AC_SUBST(ESD_LIBS)
- rm -f conf.esdtest
-])
-
-dnl AM_ESD_SUPPORTS_MULTIPLE_RECORD([ACTION-IF-SUPPORTS [, ACTION-IF-NOT-SUPPORTS]])
-dnl Test, whether esd supports multiple recording clients (version >=0.2.21)
-dnl
-AC_DEFUN([AM_ESD_SUPPORTS_MULTIPLE_RECORD],
-[dnl
- AC_MSG_NOTICE([whether installed esd version supports multiple recording clients])
- ac_save_ESD_CFLAGS="$ESD_CFLAGS"
- ac_save_ESD_LIBS="$ESD_LIBS"
- AM_PATH_ESD(0.2.21,
- ifelse([$1], , [
- AM_CONDITIONAL(ESD_SUPPORTS_MULTIPLE_RECORD, true)
- AC_DEFINE(ESD_SUPPORTS_MULTIPLE_RECORD, 1,
- [Define if you have esound with support of multiple recording clients.])],
- [$1]),
- ifelse([$2], , [AM_CONDITIONAL(ESD_SUPPORTS_MULTIPLE_RECORD, false)], [$2])
- if test "x$ac_save_ESD_CFLAGS" != x ; then
- ESD_CFLAGS="$ac_save_ESD_CFLAGS"
- fi
- if test "x$ac_save_ESD_LIBS" != x ; then
- ESD_LIBS="$ac_save_ESD_LIBS"
- fi
- )
-])
+++ /dev/null
-dnl AM_GCONF_SOURCE_2
-dnl Defines GCONF_SCHEMA_CONFIG_SOURCE which is where you should install schemas
-dnl (i.e. pass to gconftool-2
-dnl Defines GCONF_SCHEMA_FILE_DIR which is a filesystem directory where
-dnl you should install foo.schemas files
-dnl
-
-AC_DEFUN([AM_GCONF_SOURCE_2],
-[
- if test "x$GCONF_SCHEMA_INSTALL_SOURCE" = "x"; then
- GCONF_SCHEMA_CONFIG_SOURCE=`gconftool-2 --get-default-source`
- else
- GCONF_SCHEMA_CONFIG_SOURCE=$GCONF_SCHEMA_INSTALL_SOURCE
- fi
-
- AC_ARG_WITH(gconf-source,
- [ --with-gconf-source=sourceaddress Config database for installing schema files.],GCONF_SCHEMA_CONFIG_SOURCE="$withval",)
-
- AC_SUBST(GCONF_SCHEMA_CONFIG_SOURCE)
- AC_MSG_RESULT([Using config source $GCONF_SCHEMA_CONFIG_SOURCE for schema installation])
-
- if test "x$GCONF_SCHEMA_FILE_DIR" = "x"; then
- GCONF_SCHEMA_FILE_DIR='$(sysconfdir)/gconf/schemas'
- fi
-
- AC_ARG_WITH(gconf-schema-file-dir,
- [ --with-gconf-schema-file-dir=dir Directory for installing schema files.],GCONF_SCHEMA_FILE_DIR="$withval",)
-
- AC_SUBST(GCONF_SCHEMA_FILE_DIR)
- AC_MSG_RESULT([Using $GCONF_SCHEMA_FILE_DIR as install directory for schema files])
-
- AC_ARG_ENABLE(schemas-install,
- [ --disable-schemas-install Disable the schemas installation],
- [case "${enableval}" in
- yes) schemas_install=true ;;
- no) schemas_install=false ;;
- *) AC_MSG_ERROR(bad value ${enableval} for --disable-schemas-install) ;;
- esac],[schemas_install=true])
- AM_CONDITIONAL(GCONF_SCHEMAS_INSTALL, test x$schemas_install = xtrue)
-])
-ext/esd/esdsink.c
ext/pulse/pulsesink.c
ext/flac/gstflacdec.c
ext/jpeg/gstjpegdec.c
ext/shout2/gstshout2.c
ext/soup/gstsouphttpsrc.c
ext/wavpack/gstwavpackparse.c
-gconf/gstreamer.schemas.in
gst/avi/gstavidemux.c
gst/avi/gstavimux.c
gst/isomp4/qtdemux.c
sys/oss4/oss4-sink.c
sys/oss4/oss4-source.c
sys/sunaudio/gstsunaudiomixertrack.c
-sys/v4l2/gstv4l2src.c
+sys/v4l2/gstv4l2bufferpool.c
+sys/v4l2/gstv4l2object.c
+sys/v4l2/gstv4l2radio.c
sys/v4l2/v4l2_calls.c
-sys/v4l2/v4l2src_calls.c
sys/ximage/gstximagesrc.c
msgstr ""
"Project-Id-Version: gst-plugins 0.7.6\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2004-03-18 14:16+0200\n"
"Last-Translator: Petri Jooste <rkwjpj@puk.ac.za>\n"
"Language-Team: Afrikaans <i18n@af.org.za>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
-msgid "Internal data stream error."
+msgid "Failed to decode JPEG image"
msgstr ""
-msgid "Failed to decode JPEG image"
+msgid "Internal data stream error."
msgstr ""
#, fuzzy
msgstr "Kon nie buffers vanaf toestel \"%s\" verkry nie."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Kon nie buffers vanaf toestel \"%s\" verkry nie."
+
+#, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+
+#, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Toestel \"%s\" is nie 'n vasleggingtoestel nie."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Toestel \"%s\" is nie 'n vasleggingtoestel nie."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Toestel \"%s\" is nie 'n vasleggingtoestel nie."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Toestel \"%s\" is nie 'n vasleggingtoestel nie."
+
+#, fuzzy, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Kon nie buffers vanaf toestel \"%s\" verkry nie."
+
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr ""
+#, fuzzy, c-format
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Kon nie genoeg buffers vanaf toestel \"%s\" kry nie."
+
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed after %d tries. device %s. system error: %s"
msgstr ""
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Kon nie genoeg buffers vanaf toestel \"%s\" kry nie."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Kon nie buffers vanaf toestel \"%s\" verkry nie."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Toestel \"%s\" is nie 'n vasleggingtoestel nie."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Kon nie oudio-toestel \"%s\" toemaak nie."
+
+#, fuzzy, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Kon nie oudio-toestel \"%s\" toemaak nie."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Kon nie buffers vanaf toestel \"%s\" verkry nie."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
msgstr ""
#, fuzzy, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Kon nie oudio-toestel \"%s\" toemaak nie."
-
-#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr "Kon nie genoeg buffers vanaf toestel \"%s\" kry nie."
msgid "Failed to set output %d on device %s."
msgstr "Kon nie oudio-toestel \"%s\" toemaak nie."
-#, fuzzy, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Kon nie buffers vanaf toestel \"%s\" verkry nie."
-
-#, fuzzy, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Kon nie genoeg buffers vanaf toestel \"%s\" kry nie."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Kon nie buffers vanaf toestel \"%s\" verkry nie."
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Kon nie buffers vanaf toestel \"%s\" verkry nie."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
msgid "Cannot operate without a clock"
msgstr ""
+#, fuzzy
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Kon nie buffers vanaf toestel \"%s\" verkry nie."
+
#~ msgid "Could not open file \"%s\" for writing."
#~ msgstr "Kon nie lêer \"%s\" oopmaak om in te skryf nie."
msgstr ""
"Project-Id-Version: gst-plugins-0.8.0\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2004-03-19 18:29+0200\n"
"Last-Translator: Metin Amiroff <metin@karegen.com>\n"
"Language-Team: Azerbaijani <translation-team-az@lists.sourceforge.net>\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Generator: KBabel 1.0.2\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
-msgid "Internal data stream error."
+msgid "Failed to decode JPEG image"
msgstr ""
-msgid "Failed to decode JPEG image"
+msgid "Internal data stream error."
msgstr ""
#, fuzzy
msgstr "\"%s\" avadanlığından bufferlər alına bilmədi."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "\"%s\" avadanlığından bufferlər alına bilmədi."
+
+#, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+
+#, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "\"%s\" avadanlığı capture avadanlığı deyil."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "\"%s\" avadanlığı capture avadanlığı deyil."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "\"%s\" avadanlığı capture avadanlığı deyil."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "\"%s\" avadanlığı capture avadanlığı deyil."
+
+#, fuzzy, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "\"%s\" avadanlığından bufferlər alına bilmədi."
+
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr ""
+#, fuzzy, c-format
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "\"%s\" avadanlığından kifayət qədər bufferlər alına bilmədi."
+
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed after %d tries. device %s. system error: %s"
msgstr ""
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "\"%s\" avadanlığından kifayət qədər bufferlər alına bilmədi."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "\"%s\" avadanlığından bufferlər alına bilmədi."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "\"%s\" avadanlığı capture avadanlığı deyil."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "\"%s\" audio avadanlığı bağlana bilmədi."
+
+#, fuzzy, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "\"%s\" audio avadanlığı bağlana bilmədi."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "\"%s\" avadanlığından bufferlər alına bilmədi."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
msgstr ""
#, fuzzy, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "\"%s\" audio avadanlığı bağlana bilmədi."
-
-#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr "\"%s\" avadanlığından kifayət qədər bufferlər alına bilmədi."
msgid "Failed to set output %d on device %s."
msgstr "\"%s\" audio avadanlığı bağlana bilmədi."
-#, fuzzy, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "\"%s\" avadanlığından bufferlər alına bilmədi."
-
-#, fuzzy, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "\"%s\" avadanlığından kifayət qədər bufferlər alına bilmədi."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "\"%s\" avadanlığından bufferlər alına bilmədi."
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "\"%s\" avadanlığından bufferlər alına bilmədi."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
msgid "Cannot operate without a clock"
msgstr ""
+#, fuzzy
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "\"%s\" avadanlığından bufferlər alına bilmədi."
+
#~ msgid "Could not open file \"%s\" for writing."
#~ msgstr "\"%s\" faylı yazma üçün açıla bilmədi."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-04-26 22:35+0300\n"
"Last-Translator: Alexander Shopov <ash@kambanaria.org>\n"
"Language-Team: Bulgarian <dict@fsa-bg.org>\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"
-msgid "Could not establish connection to sound server"
-msgstr "Не може да се осъществи връзка към звуковия сървър."
-
-msgid "Failed to query sound server capabilities"
-msgstr "Неуспешно запитване за възможностите на звуковия сървър."
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "„%s“ от „%s“"
-msgid "Internal data stream error."
-msgstr "Вътрешна грешка на потока от данни."
-
msgid "Failed to decode JPEG image"
msgstr "Неуспешно декодиране на изображение, формат JPEG."
+msgid "Internal data stream error."
+msgstr "Вътрешна грешка на потока от данни."
+
msgid "Could not connect to server"
msgstr "Не може да се осъществи връзка към сървър."
msgstr "Грешка при прочитане %d байта от устройство „%s“."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Не могат да се получат буферите от устройството „%s“."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "Драйверът за устройството „%s“ не поддържа познати методи за запис."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "Драйверът за устройството „%s“ не поддържа познати методи за запис."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Устройството „%s“ не е изходно устройство."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Устройството „%s“ не е изходно устройство."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Устройството „%s“ не е устройство за запис."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Устройството „%s“ не е устройство за запис."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Не могат да се получат параметрите на устройството „%s“."
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Устройството за видео вход не приема новите настройки за честотата на кадри."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Получена е рамка с размер %u вместо очаквания %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Грешка при прочитане на %d байта на устройство „%s“."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Неуспешен опит за получаване на видео кадрите от устройство „%s“."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Неуспех след %d опита. Устройство „%s“. Системна грешка: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Неуспешно получаване на настройките на тунер %d на устройство „%s“."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Грешка при прочитане %d байта от устройство „%s“."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Устройството „%s“ не е изходно устройство."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Неуспешно задаване на вход %d на устройство „%s“."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Неуспешно задаване на вход %d на устройство „%s“."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Неуспешно получаване на силата на сигнала на устройство „%s“."
#, c-format
msgid ""
"Неуспешно получаване на текущия вход на устройство „%s“. Може би е радио."
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Неуспешно задаване на вход %d на устройство „%s“."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Неуспешно задаване на изход %d на устройство „%s“."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Буферите в устройство „%s“ не могат да се подредят в опашка."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Неуспешен опит за получаване на видео кадрите от устройство „%s“."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Неуспех след %d опита. Устройство „%s“. Системна грешка: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Не могат да се получат параметрите на устройството „%s“."
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Устройството за видео вход не приема новите настройки за честотата на кадри."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Не могат да се получат буферите от устройството „%s“."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "Драйверът за устройството „%s“ не поддържа познати методи за запис."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
"Промяната на разделителната способност по време на работа все още не се "
msgid "Cannot operate without a clock"
msgstr "Работата без часовник е невъзможна."
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Грешка при прочитане на %d байта на устройство „%s“."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Буферите в устройство „%s“ не могат да се подредят в опашка."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Не може да се осъществи връзка към звуковия сървър."
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Неуспешно запитване за възможностите на звуковия сървър."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.9.7\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2005-12-04 21:54+0100\n"
"Last-Translator: Jordi Mallach <jordi@sindominio.net>\n"
"Language-Team: Catalan <ca@dodds.net>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
-msgid "Internal data stream error."
-msgstr "S'ha produït un error intern de flux de dades."
-
msgid "Failed to decode JPEG image"
msgstr ""
+msgid "Internal data stream error."
+msgstr "S'ha produït un error intern de flux de dades."
+
msgid "Could not connect to server"
msgstr ""
msgstr ""
#, c-format
-msgid "Got unexpected frame size of %u instead of %u."
+msgid "Failed to enumerate possible video formats device '%s' can work with"
msgstr ""
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Could not map buffers from device '%s'"
msgstr ""
#, c-format
-msgid ""
-"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
-"it is a v4l1 driver."
+msgid "The driver of device '%s' does not support the IO method %d"
msgstr ""
#, c-format
-msgid "Failed to query attributes of input %d in device %s"
+msgid "The driver of device '%s' does not support any known IO method."
msgstr ""
#, c-format
-msgid "Failed to get setting of tuner %d on device '%s'."
+msgid "Device '%s' does not support video capture"
msgstr ""
#, c-format
-msgid "Failed to query norm on device '%s'."
+msgid "Device '%s' is busy"
msgstr ""
#, c-format
-msgid "Failed getting controls attributes on device '%s'."
+msgid "Device '%s' cannot capture at %dx%d"
msgstr ""
#, c-format
-msgid "Cannot identify device '%s'."
+msgid "Device '%s' cannot capture in the specified format"
msgstr ""
#, c-format
-msgid "This isn't a device '%s'."
+msgid "Could not get parameters on device '%s'"
+msgstr ""
+
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+
+msgid "Video device could not create buffer pool."
msgstr ""
#, c-format
-msgid "Could not open device '%s' for reading and writing."
+msgid "Got unexpected frame size of %u instead of %u."
msgstr ""
#, c-format
-msgid "Device '%s' is not a capture device."
+msgid "Failed trying to get video frames from device '%s'."
msgstr ""
#, c-format
-msgid "Device '%s' is not a output device."
+msgid "Failed after %d tries. device %s. system error: %s"
msgstr ""
#, c-format
-msgid "Failed to set norm for device '%s'."
+msgid "Failed to get settings of tuner %d on device '%s'."
msgstr ""
#, c-format
-msgid "Failed to get current tuner frequency for device '%s'."
+msgid "Error getting capabilities for device '%s'."
msgstr ""
#, c-format
-msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
+msgid "Device '%s' is not a tuner."
msgstr ""
#, c-format
-msgid "Failed to get signal strength for device '%s'."
+msgid "Failed to get radio input on device '%s'. "
msgstr ""
#, c-format
-msgid "Failed to get value for control %d on device '%s'."
+msgid "Failed to set input %d on device %s."
msgstr ""
#, c-format
-msgid "Failed to set value %d for control %d on device '%s'."
+msgid "Failed to change mute state for device '%s'."
msgstr ""
#, c-format
-msgid "Failed to get current input on device '%s'. May be it is a radio device"
+msgid ""
+"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
+"it is a v4l1 driver."
msgstr ""
#, c-format
-msgid "Failed to set input %d on device %s."
+msgid "Failed to query attributes of input %d in device %s"
msgstr ""
#, c-format
-msgid ""
-"Failed to get current output on device '%s'. May be it is a radio device"
+msgid "Failed to get setting of tuner %d on device '%s'."
msgstr ""
#, c-format
-msgid "Failed to set output %d on device %s."
+msgid "Failed to query norm on device '%s'."
msgstr ""
#, c-format
-msgid "Could not enqueue buffers in device '%s'."
+msgid "Failed getting controls attributes on device '%s'."
msgstr ""
#, c-format
-msgid "Failed trying to get video frames from device '%s'."
+msgid "Cannot identify device '%s'."
msgstr ""
#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
+msgid "This isn't a device '%s'."
msgstr ""
#, c-format
-msgid "Could not get parameters on device '%s'"
+msgid "Could not open device '%s' for reading and writing."
msgstr ""
-msgid "Video input device did not accept new frame rate setting."
+#, c-format
+msgid "Device '%s' is not a capture device."
msgstr ""
#, c-format
-msgid "Could not map buffers from device '%s'"
+msgid "Device '%s' is not a output device."
+msgstr ""
+
+#, c-format
+msgid "Failed to set norm for device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to get current tuner frequency for device '%s'."
msgstr ""
#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
+msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
+msgstr ""
+
+#, c-format
+msgid "Failed to get signal strength for device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to get value for control %d on device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to set value %d for control %d on device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to get current input on device '%s'. May be it is a radio device"
+msgstr ""
+
+#, c-format
+msgid ""
+"Failed to get current output on device '%s'. May be it is a radio device"
+msgstr ""
+
+#, c-format
+msgid "Failed to set output %d on device %s."
msgstr ""
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-01-12 23:07+0100\n"
"Last-Translator: Petr Kovar <pknbe@volny.cz>\n"
"Language-Team: Czech <translation-team-cs@lists.sourceforge.net>\n"
"Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n"
"X-Generator: Lokalize 1.1\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nezdařilo se navázání spojení se zvukovým serverem"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Nezdařil se dotaz na schopnosti zvukového serveru"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "\"%s\" od \"%s\""
-msgid "Internal data stream error."
-msgstr "Vnitřní chyba datového proudu."
-
msgid "Failed to decode JPEG image"
msgstr "Nezdařilo se dekódování obrázku JPEG"
+msgid "Internal data stream error."
+msgstr "Vnitřní chyba datového proudu."
+
msgid "Could not connect to server"
msgstr "Nezdařilo se spojení se serverem"
msgstr "Chyba při čtení %d bajtů na zařízení \"%s\"."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Nezdařilo se mapování vyrovnávací paměti ze zařízení \"%s\""
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "Ovladač zařízení \"%s\" nepodporuje žádnou známou záznamovou metodu."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "Ovladač zařízení \"%s\" nepodporuje žádnou známou záznamovou metodu."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Zařízení \"%s\" není výstupním zařízením."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Zařízení \"%s\" není výstupním zařízením."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Zařízení \"%s\" není záznamovým zařízením."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Zařízení \"%s\" není záznamovým zařízením."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Nezdařilo se zjištění parametrů na zařízení \"%s\""
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "Vstupní videozařízení nepřijalo nové nastavení vzorkovací frekvence."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Získána neočekávaná velikost snímku %u namísto %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Nezdařil se pokus o získání videosnímků ze zařízení \"%s\"."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Selhání po %d pokusech. Zařízení %s. Systémová chyba: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Selhalo zjištění nastavení tuneru %d na zařízení \"%s\"."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Chyba při čtení %d bajtů na zařízení \"%s\"."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Zařízení \"%s\" není výstupním zařízením."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Nezdařilo se nastavení vstupu \"%d\" na zařízení \"%s\"."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Nezdařilo se nastavení vstupu \"%d\" na zařízení \"%s\"."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Nezdařilo se zjištění síly signálu u zařízení \"%s\"."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"radiopřijímač."
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Nezdařilo se nastavení vstupu \"%d\" na zařízení \"%s\"."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Nezdařilo se nastavení výstupu %d na zařízení %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Nezdařilo se zařazení vyrovnávací paměti na zařízení \"%s\"."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Nezdařil se pokus o získání videosnímků ze zařízení \"%s\"."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Selhání po %d pokusech. Zařízení %s. Systémová chyba: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Nezdařilo se zjištění parametrů na zařízení \"%s\""
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Vstupní videozařízení nepřijalo nové nastavení vzorkovací frekvence."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Nezdařilo se mapování vyrovnávací paměti ze zařízení \"%s\""
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "Ovladač zařízení \"%s\" nepodporuje žádnou známou záznamovou metodu."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Změna rozlišení za běhu doposud není podporována."
msgid "Cannot operate without a clock"
msgstr "Není možné fungovat bez hodin"
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Chyba při čtení %d bajtů na zařízení \"%s\"."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Nezdařilo se zařazení vyrovnávací paměti na zařízení \"%s\"."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nezdařilo se navázání spojení se zvukovým serverem"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Nezdařil se dotaz na schopnosti zvukového serveru"
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-01-07 23:54+0200\n"
"Last-Translator: Joe Hansen <joedalton2@yahoo.dk>\n"
"Language-Team: Danish <dansk@dansk-gruppen.dk>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Kunne ikke skabe kontakt til lyd-serveren"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Listning af lydservers egenskaber fejlede"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "'%s' af '%s'"
-msgid "Internal data stream error."
-msgstr "Intern datastrømsfejl."
-
msgid "Failed to decode JPEG image"
msgstr "Mislykkedes i at afkode JPEG-billede"
+msgid "Internal data stream error."
+msgstr "Intern datastrømsfejl."
+
msgid "Could not connect to server"
msgstr "Kunne ikke forbinde til server"
msgid "Error reading %d bytes from device '%s'."
msgstr "Fejl ved læsning af %d byte på enhed '%s'."
+#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Kunne ikke afbilde mellemlager fra enhed '%s'"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "Driveren til enhed '%s' understøtter ingen kendt optagemetode."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "Driveren til enhed '%s' understøtter ingen kendt optagemetode."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Enhed '%s' er ikke en uddataenhed."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Enhed '%s' er ikke en uddataenhed."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Enhed '%s' er ikke en optageenhed."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Enhed '%s' er ikke en optageenhed."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Kunne ikke hente parametre fra enhed '%s'"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Enheden til videoinddata accepterede ikke ny indstilling for billedrate."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
# Er rammestørrelse ikke mere passende? Jeg er dog ikke helt sikker på
# konteksten her.
#, c-format
msgstr "Fik uventet billedstørrelse på %u i steden for %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Forsøg på at hente videorammer fra enhed '%s' mislykkedes."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Fejlede efter %d forsøg. enhed %s. systemfejl: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Hentning af indstillinger for tuner %d på enhed '%s' fejlede."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Fejl ved læsning af %d byte på enhed '%s'."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Enhed '%s' er ikke en uddataenhed."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Kunne ikke sætte inddata %d for enhed %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Kunne ikke sætte inddata %d for enhed %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Kunne ikke hente signalstyrke for enhed '%s'."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"Kunne ikke hente nuværende inddata for enhed '%s'. Måske er det en radioenhed"
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Kunne ikke sætte inddata %d for enhed %s."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Kunne ikke sætte uddata %d for enhed %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Kunne ikke sætte mellemlager fra enhed '%s' i kø."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Forsøg på at hente videorammer fra enhed '%s' mislykkedes."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Fejlede efter %d forsøg. enhed %s. systemfejl: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Kunne ikke hente parametre fra enhed '%s'"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Enheden til videoinddata accepterede ikke ny indstilling for billedrate."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Kunne ikke afbilde mellemlager fra enhed '%s'"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "Driveren til enhed '%s' understøtter ingen kendt optagemetode."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Ændring af opløsning under kørsel er endnu ikke understøttet."
msgid "Cannot operate without a clock"
msgstr "Kan ikke fungere uden et ur"
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Fejl ved læsning af %d byte på enhed '%s'."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Kunne ikke sætte mellemlager fra enhed '%s' i kø."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Kunne ikke skabe kontakt til lyd-serveren"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Listning af lydservers egenskaber fejlede"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-01-21 22:36+0100\n"
"Last-Translator: Christian Kirbach <christian.kirbach@googlemail.com>\n"
"Language-Team: German <translation-team-de@lists.sourceforge.net>\n"
"X-Poedit-Country: GERMANY\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Es konnte keine Verbindung zum Audio-Server hergestellt werden"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Abfrage der Fähigkeiten des Audio-Servers ist fehlgeschlagen"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "»%s« durch »%s«"
-msgid "Internal data stream error."
-msgstr "Interner Datenstromfehler."
-
msgid "Failed to decode JPEG image"
msgstr "Dekodieren des JPEG-Bildes schlug fehl"
+msgid "Internal data stream error."
+msgstr "Interner Datenstromfehler."
+
msgid "Could not connect to server"
msgstr "Verbindung zum Server konnte nicht hergestellt werden"
msgid "Error reading %d bytes from device '%s'."
msgstr "Fehler beim Lesen von %d Bytes von Gerät »%s«."
+#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+"Aufzählen möglicher Videoformate, mit denen Gerät »%s« arbeiten kann, schlug "
+"fehl"
+
+# »Geholt« ist auch noch nicht optimal, aber »abgebildet« geht auch nicht wirklich.
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Die Puffer des Gerätes »%s« konnten nicht geholt werden."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Der Treiber von Gerät »%s« unterstützt keine der bekannten Aufnahmemethoden."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Der Treiber von Gerät »%s« unterstützt keine der bekannten Aufnahmemethoden."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "»%s« ist kein Wiedergabegerät."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "»%s« ist kein Wiedergabegerät."
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Das Gerät »%s« kann nicht bei %dx%d aufnehmen"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Das angegebene Format kann nicht vom Gerät »%s« ausgelesen werden"
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Parameter konnten nicht von Gerät »%s« ausgelesen werden"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Vom Video-Eingabegerät wurde die Einstellung zur Bildwiederholungsrate nicht "
+"akzeptiert."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
# »Frame« sollten wir im Zusammenhang mit Videos vielleicht besser so belassen. Habe ich in gnome-subtitles auch so gemacht.
#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Unerwartete Frame-Größe von %u anstatt %u wurde erhalten."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Fehler beim Lesen von %d Bytes auf Gerät »%s«."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Der Versuch Videobilder von Gerät »%s« auszulesen schlug fehl."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "%d Versuche sind fehlgeschlagen. Gerät »%s«. Systemfehler: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr ""
+"Auslesen der Einstellungen der Senderwahl »%d« auf Gerät »%s« schlug fehl."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Fehler beim Lesen von %d Bytes von Gerät »%s«."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "»%s« ist kein Wiedergabegerät."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Festlegen der Eingabe »%d« des Geräts »%s« schlug fehl."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Festlegen der Eingabe »%d« des Geräts »%s« schlug fehl."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Auslesen der aktuellen Signalstärke des Geräts »%s« schlug fehl."
#, c-format
msgid ""
"Auslesen der aktuellen Eingabe auf dem Gerät »%s« schlug fehl. Vielleicht "
"ist es ein Funkgerät."
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Festlegen der Eingabe »%d« des Geräts »%s« schlug fehl."
-
# Sendegerät sicherlich nicht, eben eher ein Radio. Warum diese Fehlermeldung beie einem Radiogerät kommen könnte, weiß ich allerdings auch nicht.
#, c-format
msgid ""
msgid "Failed to set output %d on device %s."
msgstr "Festlegen der Ausgabe »%d« des Geräts »%s« schlug fehl."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Auf Gerät »%s« konnten keine Puffer eingereiht werden."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Der Versuch Videobilder von Gerät »%s« auszulesen schlug fehl."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "%d Versuche sind fehlgeschlagen. Gerät »%s«. Systemfehler: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Parameter konnten nicht von Gerät »%s« ausgelesen werden"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Vom Video-Eingabegerät wurde die Einstellung zur Bildwiederholungsrate nicht "
-"akzeptiert."
-
-# »Geholt« ist auch noch nicht optimal, aber »abgebildet« geht auch nicht wirklich.
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Die Puffer des Gerätes »%s« konnten nicht geholt werden."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Der Treiber von Gerät »%s« unterstützt keine der bekannten Aufnahmemethoden."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Ein Wechsel der Auflösung zur Laufzeit wird noch nicht unterstützt."
msgid "Cannot operate without a clock"
msgstr "Es kann nicht ohne einen Taktgeber gearbeitet werden."
-#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
-#~ msgstr ""
-#~ "Aufzählen möglicher Videoformate, mit denen Gerät »%s« arbeiten kann, "
-#~ "schlug fehl"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Fehler beim Lesen von %d Bytes auf Gerät »%s«."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Auf Gerät »%s« konnten keine Puffer eingereiht werden."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Es konnte keine Verbindung zum Audio-Server hergestellt werden"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Abfrage der Fähigkeiten des Audio-Servers ist fehlgeschlagen"
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ msgid "No free buffers found in the pool at index %d."
#~ msgstr "Es wurden keine freien Puffer im Vorrat bei Index %d gefunden."
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "Das Gerät »%s« kann nicht bei %dx%d aufnehmen"
-
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "Das angegebene Format kann nicht vom Gerät »%s« ausgelesen werden"
-
#~ msgid "Could not get buffers from device '%s'."
#~ msgstr "Die Puffer konnten nicht von Gerät »%s« ausgelesen werden."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2010-10-27 12:16+0200\n"
"Last-Translator: Michael Kotsarinis <mk73628@gmail.com>\n"
"Language-Team: Greek <team@lists.gnome.gr>\n"
"X-Generator: Lokalize 0.3\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Δεν ήταν δυνατή η σύνδεση με τον διακομιστή ήχου"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Αποτυχία αναζήτησης των δυνατοτήτων του διακομιστή ήχου"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "'%s' από '%s'"
-msgid "Internal data stream error."
-msgstr "Εσωτερικό σφάλμα ροής δεδομένων."
-
msgid "Failed to decode JPEG image"
msgstr "Αποτυχία αποκωδικοποίησης της εικόνας JPEG"
+msgid "Internal data stream error."
+msgstr "Εσωτερικό σφάλμα ροής δεδομένων."
+
msgid "Could not connect to server"
msgstr "Δεν ήταν δυνατή η σύνδεση με τον διακομιστή"
msgstr "Σφάλμα κατά την ανάγνωση %d bytes απο την συσκευή '%s'."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Αδυναμία χαρτογράφησης buffer για τη συσκευή '%s'"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "Ο οδηγός της συσκευής '%s' δεν υποστηρίζει κάποια γνωστή μέθοδο λήψης."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "Ο οδηγός της συσκευής '%s' δεν υποστηρίζει κάποια γνωστή μέθοδο λήψης."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Η συσκευή '%s' δεν είναι συσκευή εξόδου."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Η συσκευή '%s' δεν είναι συσκευή εξόδου."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Η συσκευή '%s' δεν είναι μια συσκευή λήψης."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Η συσκευή '%s' δεν είναι μια συσκευή λήψης."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Δεν ήταν δυνατή η ανάγνωση των παραμέτρων στην συσκευή '%s'"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "Η συσκευή εισόδου βίντεο δεν αποδέχθηκε τη νέα ρύθμιση ταχύτητας καρέ."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Απροσδόκητο μέγεθος πλαισίου από %u αντί του %u"
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Σφάλμα κατά την ανάγνωση %d bytes στην συσκευή '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Αποτυχία ανάγνωσης πλαισίων βίντεο από την συσκευή '%s'."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Αποτυχία έπειτα από %d προσπάθειες. συσκευή %s. σφάλμα συστήματος: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Αποτυχία ανάγνωσης των ρυθμίσεων δέκτη %d στην συσκευή '%s'."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Σφάλμα κατά την ανάγνωση %d bytes απο την συσκευή '%s'."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Η συσκευή '%s' δεν είναι συσκευή εξόδου."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Αποτυχία ρύθμισης εισαγωγής %d στην συσκευή %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Αποτυχία ρύθμισης εισαγωγής %d στην συσκευή %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Αποτυχία ανάγνωσης ισχύς σήματος για την συσκευή '%s'."
#, c-format
msgid ""
"Αποτυχία ανάγνωσης της τρέχουσας εισαγωγής στην συσκευή '%s'. Πιθανόν να "
"είναι μια συσκευή ράδιο"
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Αποτυχία ρύθμισης εισαγωγής %d στην συσκευή %s."
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "Αποτυχία ρύθμισης εισαγωγής %d στην συσκευή %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Αδυναμία θέσης σε σειρά των buffer στη συσκευή '%s'."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Αποτυχία ανάγνωσης πλαισίων βίντεο από την συσκευή '%s'."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Αποτυχία έπειτα από %d προσπάθειες. συσκευή %s. σφάλμα συστήματος: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Δεν ήταν δυνατή η ανάγνωση των παραμέτρων στην συσκευή '%s'"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Η συσκευή εισόδου βίντεο δεν αποδέχθηκε τη νέα ρύθμιση ταχύτητας καρέ."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Αδυναμία χαρτογράφησης buffer για τη συσκευή '%s'"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "Ο οδηγός της συσκευής '%s' δεν υποστηρίζει κάποια γνωστή μέθοδο λήψης."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
"Η αλλαγή της ανάλυσης κατά τη διάρκεια λειτουργίας δεν υποστηρίζεται ακόμα."
msgid "Cannot operate without a clock"
msgstr "Δεν είναι δυνατή η λειτουργία χωρίς ρολόι "
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Σφάλμα κατά την ανάγνωση %d bytes στην συσκευή '%s'."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Αδυναμία θέσης σε σειρά των buffer στη συσκευή '%s'."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Δεν ήταν δυνατή η σύνδεση με τον διακομιστή ήχου"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Αποτυχία αναζήτησης των δυνατοτήτων του διακομιστή ήχου"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Περιγράφει το επιλεγμένο στοιχείο εισαγωγής."
msgstr ""
"Project-Id-Version: gst-plugins 0.8.1\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2004-04-26 10:41-0400\n"
"Last-Translator: Gareth Owen <gowen72@yahoo.com>\n"
"Language-Team: English (British) <en_gb@li.org>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
-msgid "Internal data stream error."
+msgid "Failed to decode JPEG image"
msgstr ""
-msgid "Failed to decode JPEG image"
+msgid "Internal data stream error."
msgstr ""
#, fuzzy
msgstr "Could not get buffers from device \"%s\"."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Could not get buffers from device \"%s\"."
+
+#, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+
+#, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Device \"%s\" is not a capture device."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Device \"%s\" is not a capture device."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Device \"%s\" is not a capture device."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Device \"%s\" is not a capture device."
+
+#, fuzzy, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Could not get buffers from device \"%s\"."
+
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr ""
+#, fuzzy, c-format
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Could not get enough buffers from device \"%s\"."
+
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed after %d tries. device %s. system error: %s"
msgstr ""
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Could not get enough buffers from device \"%s\"."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Could not get buffers from device \"%s\"."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Device \"%s\" is not a capture device."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Could not close audio device \"%s\"."
+
+#, fuzzy, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Could not close audio device \"%s\"."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Could not get buffers from device \"%s\"."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
msgstr ""
#, fuzzy, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Could not close audio device \"%s\"."
-
-#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr "Could not get enough buffers from device \"%s\"."
msgid "Failed to set output %d on device %s."
msgstr "Could not close audio device \"%s\"."
-#, fuzzy, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Could not get buffers from device \"%s\"."
-
-#, fuzzy, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Could not get enough buffers from device \"%s\"."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Could not get buffers from device \"%s\"."
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Could not get buffers from device \"%s\"."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
msgid "Cannot operate without a clock"
msgstr ""
+#, fuzzy
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Could not get buffers from device \"%s\"."
+
#~ msgid "Could not open file \"%s\" for writing."
#~ msgstr "Could not open file \"%s\" for writing."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-02-12 18:30+0100\n"
"Last-Translator: Jorge González González <aloriel@gmail.com>\n"
"Language-Team: Spanish <es@li.org>\n"
"X-Generator: KBabel 1.11.4\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "No se pudo establecer la conexión con el servidor de sonido"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Falló al preguntar al servidor de sonido sus capacidades"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "«%s» por «%s»"
-msgid "Internal data stream error."
-msgstr "Error interno de flujo de datos."
-
msgid "Failed to decode JPEG image"
msgstr "Falló al decodificar la imagen JPEG"
+msgid "Internal data stream error."
+msgstr "Error interno de flujo de datos."
+
msgid "Could not connect to server"
msgstr "No se pudo conectar con el servidor"
msgstr "Error al leer %d bytes del dispositivo «%s»."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+"Falló al enumerar los posibles formatos de vídeo con los que el dispositivo "
+"«%s» puede trabajar"
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "No se pudieron mapear los búferes del dispositivo «%s»"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"El controlador del dispositivo «%s» no soporta ningún método de captura "
+"conocido."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"El controlador del dispositivo «%s» no soporta ningún método de captura "
+"conocido."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "El dispositivo «%s» no es un dispositivo de salida."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "El dispositivo «%s» no es un dispositivo de salida."
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "El dispositivo «%s» no puede capturar a %dx%d"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "El dispositivo «%s» no puede capturar en el formato especificado"
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "No se pudieron obtener los parámetros para el dispositivo «%s»"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"El dispositivo de entrada de vídeo no aceptó el ajuste de la nueva tasa de "
+"fotogramas."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Se obtuvo un tamaño de cuadro inesperado %u en lugar de %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Falló al intentar obtener cuadros de vídeo del dispositivo «%s»."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Falló después de %d intentos. Dispositivo %s. Error del sistema: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr ""
+"Falló al obtener la posición del sintonizador %d en el dispositivo «%s»."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Error al leer %d bytes del dispositivo «%s»."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "El dispositivo «%s» no es un dispositivo de salida."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Falló al establecer la entrada %d en el dispositivo %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Falló al establecer la entrada %d en el dispositivo %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Falló al obtener la potencia de la señal para el dispositivo «%s»."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"dispositivo de radio."
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Falló al establecer la entrada %d en el dispositivo %s."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Falló al establecer la salida %d en el dispositivo %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "No se pueden encolar los búferes en el dispositivo «%s»."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Falló al intentar obtener cuadros de vídeo del dispositivo «%s»."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Falló después de %d intentos. Dispositivo %s. Error del sistema: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "No se pudieron obtener los parámetros para el dispositivo «%s»"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"El dispositivo de entrada de vídeo no aceptó el ajuste de la nueva tasa de "
-"fotogramas."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "No se pudieron mapear los búferes del dispositivo «%s»"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"El controlador del dispositivo «%s» no soporta ningún método de captura "
-"conocido."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "El cambio de resolución durante la reproducción aún no está soportado."
msgid "Cannot operate without a clock"
msgstr "No se puede operar sin reloj"
-#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
-#~ msgstr ""
-#~ "Falló al enumerar los posibles formatos de vídeo con los que el "
-#~ "dispositivo «%s» puede trabajar"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Error al leer %d bytes del dispositivo «%s»."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "No se pueden encolar los búferes en el dispositivo «%s»."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "No se pudo establecer la conexión con el servidor de sonido"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Falló al preguntar al servidor de sonido sus capacidades"
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ msgid "No free buffers found in the pool at index %d."
#~ msgstr "No se encontraron búferes libres en el índice %d del «pool»."
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "El dispositivo «%s» no puede capturar a %dx%d"
-
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "El dispositivo «%s» no puede capturar en el formato especificado"
-
#~ msgid "Could not get buffers from device '%s'."
#~ msgstr "No se pudieron obtener búferes del dispositivo «%s»."
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.18.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2010-03-25 12:37+0100\n"
"Last-Translator: Mikel Olasagasti Uranga <hey_neken@mundurat.net>\n"
"Language-Team: Basque <translation-team-eu@lists.sourceforge.net>\n"
"X-Generator: KBabel 1.11.4\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Ezin izan da konexioa ezarri soinu-zerbitzariarekin"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Huts egin du soinu-zerbitzariaren ahalmena kontsultatzean"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "'%s' - '%s'"
-msgid "Internal data stream error."
-msgstr "Datu-korrontearen barne-errorea."
-
msgid "Failed to decode JPEG image"
msgstr "Huts egin du JPEG irudia deskodetzean"
+msgid "Internal data stream error."
+msgstr "Datu-korrontearen barne-errorea."
+
msgid "Could not connect to server"
msgstr "Ezin izan da konektatu zerbitzariarekin"
msgstr "Errorea gertatu da '%2$s' gailuan %1$d byte irakurtzean."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Ezin izan dira '%s' gailuaren bufferrak mapatu."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"'%s' gailuaren kontrolatzaileak ez du onartzen kaptura-metodo ezagunik."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"'%s' gailuaren kontrolatzaileak ez du onartzen kaptura-metodo ezagunik."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "'%s' gailua ez da irteerako gailu bat."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "'%s' gailua ez da irteerako gailu bat."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "'%s' gailua ez da kaptura-gailu bat."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "'%s' gailua ez da kaptura-gailu bat."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Ezin izan dira '%s' gailuaren parametroak eskuratu"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Bideoaren sarrerako gailuak ez du fotograma-tamainaren ezarpen berria "
+"onartzen."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Ustekabeko fotograma-tamaina jaso da (%u), %u ordez."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Huts egin du '%s' gailutik bideo-fotogramak eskuratzen saiatzean."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Huts egin du %d saio eta gero. %s gailua. Sistema-errorea: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr ""
+"Huts egin du '%2$s' gailuko %1$d. sintonizadorearen ezarpenak eskuratzean."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Errorea gertatu da '%2$s' gailuan %1$d byte irakurtzean."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "'%s' gailua ez da irteerako gailu bat."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Huts egin du '%2$s' gailuko %1$d. sarrera ezartzean."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Huts egin du '%2$s' gailuko %1$d. sarrera ezartzean."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Huts egin du '%s' gailuaren seinalearen indarra eskuratzean."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"Huts egin du '%s' gailuko uneko sarrera eskuratzean. Litekeena da irrati-"
"gailu bat izatea."
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Huts egin du '%2$s' gailuko %1$d. sarrera ezartzean."
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "Huts egin du '%2$s' gailuko %1$d. sarrera ezartzean."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Ezin izan dira bufferrak ilaran jarri '%s' gailuan."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Huts egin du '%s' gailutik bideo-fotogramak eskuratzen saiatzean."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Huts egin du %d saio eta gero. %s gailua. Sistema-errorea: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Ezin izan dira '%s' gailuaren parametroak eskuratu"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Bideoaren sarrerako gailuak ez du fotograma-tamainaren ezarpen berria "
-"onartzen."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Ezin izan dira '%s' gailuaren bufferrak mapatu."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"'%s' gailuaren kontrolatzaileak ez du onartzen kaptura-metodo ezagunik."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Oraindik ez dago onartua exekutatu bitartean bereizmena aldatzea."
msgid "Cannot operate without a clock"
msgstr "Ezin du funtzionatu erlojurik gabe"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Errorea gertatu da '%2$s' gailuan %1$d byte irakurtzean."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Ezin izan dira bufferrak ilaran jarri '%s' gailuan."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Ezin izan da konexioa ezarri soinu-zerbitzariarekin"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Huts egin du soinu-zerbitzariaren ahalmena kontsultatzean"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Sarrerako elementu hautatua deskribatzen du."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2010-11-17 23:03+0200\n"
"Last-Translator: Tommi Vainikainen <Tommi.Vainikainen@iki.fi>\n"
"Language-Team: Finnish <translation-team-fi@lists.sourceforge.net>\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Yhteyttä äänipalvelimeen ei voitu avata"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Äänipalvelimen ominaisuuksia ei voitu selvittää"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "”%s” artistilta ”%s”"
-msgid "Internal data stream error."
-msgstr "Sisäisen tietovirran virhe."
-
msgid "Failed to decode JPEG image"
msgstr "JPEG-kuvan purku epäonnistui"
+msgid "Internal data stream error."
+msgstr "Sisäisen tietovirran virhe."
+
msgid "Could not connect to server"
msgstr "Palvelimeen ei saatu yhteyttä"
msgstr "Virhe luettaessa %d tavua laitteelta ”%s”."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Puskereita laitteelta ”%s” ei voitu kartoittaa"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "Laitteen ”%s” ajuri ei tue mitään tunnettua kaappaustapaa."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "Laitteen ”%s” ajuri ei tue mitään tunnettua kaappaustapaa."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Laite ”%s” ei ole ulostulolaite."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Laite ”%s” ei ole ulostulolaite."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Laitteelta ”%s” ei voi kaapata."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Laitteelta ”%s” ei voi kaapata."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Laitteelta ”%s” ei voitu saada parametreja"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "Videosyötelaite ei hyväksy uutta kehysnopeusasetusta."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Saatiin odottamaton kehys kooltaan %u odotetun %u sijaan."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Videoruutujen noutaminen laitteelta ”%s” epäonnistui."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Epäonnistui %d yrityksen jälkeen: laite %s, järjestelmävirhe: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Virittimen %d asetuksia ei voitu lukea laitteelta ”%s”."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Virhe luettaessa %d tavua laitteelta ”%s”."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Laite ”%s” ei ole ulostulolaite."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Sisääntuloa %d ei voitu asettaa laitteelle %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Sisääntuloa %d ei voitu asettaa laitteelle %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Laitteelta ”%s” ei saatu signaalinvoimakkuutta."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"Laitteen ”%s” tämänhetkistä sisääntuloa ei voitu lukea, se ei ehkä ole "
"radiolaite"
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Sisääntuloa %d ei voitu asettaa laitteelle %s."
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "Sisääntuloa %d ei voitu asettaa laitteelle %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Puskureita ei voitu laittaa jonoon laitteella ”%s”."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Videoruutujen noutaminen laitteelta ”%s” epäonnistui."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Epäonnistui %d yrityksen jälkeen: laite %s, järjestelmävirhe: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Laitteelta ”%s” ei voitu saada parametreja"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Videosyötelaite ei hyväksy uutta kehysnopeusasetusta."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Puskereita laitteelta ”%s” ei voitu kartoittaa"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "Laitteen ”%s” ajuri ei tue mitään tunnettua kaappaustapaa."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Resoluution vaihto käytön aikana ei ole vielä mahdollista."
msgid "Cannot operate without a clock"
msgstr "Ei voitu toimia ilman kelloa"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Virhe luettaessa %d tavua laitteelta ”%s”."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Puskureita ei voitu laittaa jonoon laitteella ”%s”."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Yhteyttä äänipalvelimeen ei voitu avata"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Äänipalvelimen ominaisuuksia ei voitu selvittää"
+
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ "buffers have been allocated yet, or the userptr or length are invalid. "
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-04-28 09:22+0200\n"
"Last-Translator: Claude Paroz <claude@2xlibre.net>\n"
"Language-Team: French <traduc@traduc.org>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Impossible d'établir une connexion vers le serveur de son"
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-"Échec lors de l'interrogation du serveur de son au sujet de ses capacités"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "« %s » par « %s »"
-msgid "Internal data stream error."
-msgstr "Erreur interne de flux de données."
-
msgid "Failed to decode JPEG image"
msgstr "Échec de décodage de l'image JPEG"
+msgid "Internal data stream error."
+msgstr "Erreur interne de flux de données."
+
msgid "Could not connect to server"
msgstr "Impossible de se connecter au serveur"
msgstr "Erreur de lecture de %d octets sur le périphérique « %s »."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Impossible de mapper les tampons du périphérique « %s »."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Le pilote du périphérique « %s » ne prend en charge aucune méthode "
+"d'enregistrement connue."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Le pilote du périphérique « %s » ne prend en charge aucune méthode "
+"d'enregistrement connue."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Le périphérique « %s » n'est pas un périphérique de sortie."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Le périphérique « %s » n'est pas un périphérique de sortie."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Le périphérique « %s » n'est pas un périphérique d'enregistrement."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Le périphérique « %s » n'est pas un périphérique d'enregistrement."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Impossible d'obtenir les paramètres du périphérique « %s »"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Le périphérique d'entrée vidéo n'a pas accepté le nouveau paramètre de "
+"fréquence d'image."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Taille de trame imprévue de %u à la place de %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "L'obtention de trames vidéo du périphérique « %s » a échoué."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Échec après %d tentatives. périphérique %s. erreur système : %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr ""
+"Impossible d'obtenir les paramètres du syntoniseur %d du périphérique « %s »."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Erreur de lecture de %d octets sur le périphérique « %s »."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Le périphérique « %s » n'est pas un périphérique de sortie."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Impossible de définir l'entrée %d du périphérique %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Impossible de définir l'entrée %d du périphérique %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Impossible d'obtenir la force du signal du périphérique « %s »."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"être un périphérique radio"
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Impossible de définir l'entrée %d du périphérique %s."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Impossible de définir la sortie %d du périphérique %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr ""
-"Impossible de mettre les tampons en file d'attente sur le périphérique "
-"« %s »."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "L'obtention de trames vidéo du périphérique « %s » a échoué."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Échec après %d tentatives. périphérique %s. erreur système : %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Impossible d'obtenir les paramètres du périphérique « %s »"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Le périphérique d'entrée vidéo n'a pas accepté le nouveau paramètre de "
-"fréquence d'image."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Impossible de mapper les tampons du périphérique « %s »."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Le pilote du périphérique « %s » ne prend en charge aucune méthode "
-"d'enregistrement connue."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
"La modification de résolution en cours d'exécution n'est pas encore prise en "
msgid "Cannot operate without a clock"
msgstr "Impossible de fonctionner sans horloge"
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Erreur de lecture de %d octets sur le périphérique « %s »."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr ""
+#~ "Impossible de mettre les tampons en file d'attente sur le périphérique "
+#~ "« %s »."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Impossible d'établir une connexion vers le serveur de son"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr ""
+#~ "Échec lors de l'interrogation du serveur de son au sujet de ses capacités"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-01-09 21:20+0100\n"
"Last-Translator: Fran Diéguez <frandieguez@ubuntu.com>\n"
"Language-Team: Galician <proxecto@trasno.net>\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n!=1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Non foi posíbel estabelecer a conexión co servidor de son"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Produciuse un erro ao consultar as capacidades do servidor de son"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "«%s» por «%s»"
-msgid "Internal data stream error."
-msgstr "Produciuse un erro no fluxo de datos interno."
-
msgid "Failed to decode JPEG image"
msgstr "Produciuse un erro ao descodificar a imaxe JPEG"
+msgid "Internal data stream error."
+msgstr "Produciuse un erro no fluxo de datos interno."
+
msgid "Could not connect to server"
msgstr "Non foi posíbel conectarse ao servidor"
msgstr "Produciuse un erro ao ler %d bytes desde o dispositivo «%s»."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Non foi posíbel mapear os búferes do dispositivo «%s»"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"O controlador do dispositivo «%s» non admite ningún método de captura "
+"coñecido."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"O controlador do dispositivo «%s» non admite ningún método de captura "
+"coñecido."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "O dispositivo «%s» non é un dispositivo de captura."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "O dispositivo «%s» non é un dispositivo de captura."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "O dispositivo «%s» non é un dispositivo de captura."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "O dispositivo «%s» non é un dispositivo de captura."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Non foi posíbel obter os parámetros para o dispositivo «%s»"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"O dispositivo de entrada de vídeo non aceptou o axuste da nova taxa de "
+"fotogramas."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Obtívose un tamaño de marco de %u non esperado no lugar de %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Produciuse un erro ao ler %d bytes do dispositivo «%s»."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr ""
+"Produciuse un fallo ao tentar obter cadros de vídeo do dispositivo «%s»."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr ""
+"Prouciuse un fallo despois de %d intentos. Dispositivo %s. Error do sistema: "
+"%s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr ""
+"Produciuse un erro ao configurar o sintonizador %d no dispositivo «%s»."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Produciuse un erro ao ler %d bytes desde o dispositivo «%s»."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "O dispositivo «%s» non é un dispositivo de captura."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Produciuse un fallo ao estabelecer a entrada %d no dispositivo %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Produciuse un fallo ao estabelecer a entrada %d no dispositivo %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr ""
+"Produciuse un erro ao obter a potencia do sinal para o dispositivo «%s»."
#, c-format
msgid ""
"sexa un dispositivo de radio."
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Produciuse un fallo ao estabelecer a entrada %d no dispositivo %s."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Produciuse un fallo ao estabelecer a saída %d no dispositivo %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Non é posíbel meter na cola os búferes no dispositivo «%s»."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr ""
-"Produciuse un fallo ao tentar obter cadros de vídeo do dispositivo «%s»."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr ""
-"Prouciuse un fallo despois de %d intentos. Dispositivo %s. Error do sistema: "
-"%s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Non foi posíbel obter os parámetros para o dispositivo «%s»"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"O dispositivo de entrada de vídeo non aceptou o axuste da nova taxa de "
-"fotogramas."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Non foi posíbel mapear os búferes do dispositivo «%s»"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"O controlador do dispositivo «%s» non admite ningún método de captura "
-"coñecido."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Aínda non se admite o cambio de resolución durante a reprodución."
msgid "Cannot operate without a clock"
msgstr "Non é posíbel operar sen reloxo"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Produciuse un erro ao ler %d bytes do dispositivo «%s»."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Non é posíbel meter na cola os búferes no dispositivo «%s»."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Non foi posíbel estabelecer a conexión co servidor de son"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Produciuse un erro ao consultar as capacidades do servidor de son"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Describe o elemento de entrada seleccionado."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2010-11-04 01:21+0100\n"
"Last-Translator: Gabor Kelemen <kelemeng@gnome.hu>\n"
"Language-Team: Hungarian <translation-team-hu@lists.sourceforge.net>\n"
"X-Rosetta-Export-Date: 2007-07-27 19:18:15+0000\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nem hozható létre kapcsolat a hangkiszolgálóhoz"
-
-msgid "Failed to query sound server capabilities"
-msgstr "A hangkiszolgáló képességeinek lekérdezése meghiúsult"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "„%s” ettől: „%s”"
-msgid "Internal data stream error."
-msgstr "Belső adatfolyam-hiba."
-
msgid "Failed to decode JPEG image"
msgstr "A JPEG kép visszafejtése meghiúsult"
+msgid "Internal data stream error."
+msgstr "Belső adatfolyam-hiba."
+
msgid "Could not connect to server"
msgstr "Nem lehet csatlakozni a kiszolgálóhoz"
msgstr "Hiba %d bájt olvasásakor a következő eszközről: „%s”."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Nem képezhetők le a(z) „%s” eszköz pufferei"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"A(z) „%s” eszköz illesztőprogramja nem támogat egyetlen ismert felvételi "
+"módot sem."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"A(z) „%s” eszköz illesztőprogramja nem támogat egyetlen ismert felvételi "
+"módot sem."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "A(z) „%s” eszköz nem kimeneti eszköz."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "A(z) „%s” eszköz nem kimeneti eszköz."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "A(z) „%s” eszköz nem rögzítőeszköz."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "A(z) „%s” eszköz nem rögzítőeszköz."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Nem kérhetők le a(z) „%s” eszköz paraméterei"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"A videobemeneti eszköz nem fogadta el az új képkockasebesség-beállítást."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Váratlan keretméret (%u) érkezett %u helyett."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Nem sikerült videokockákat lekérni a(z) „%s” eszköztől."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Hiba %d próbálkozás után. Eszköz: %s. Rendszerhiba: %s."
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "A(z) %d. tuner beállításának lekérése a(z) „%s” eszközön meghiúsult."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Hiba %d bájt olvasásakor a következő eszközről: „%s”."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "A(z) „%s” eszköz nem kimeneti eszköz."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "A(z) %d. bemenet beállítása meghiúsult a(z) „%s” eszközön."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "A(z) %d. bemenet beállítása meghiúsult a(z) „%s” eszközön."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "A jelerősség lekérdezése meghiúsult a(z) „%s” eszközön."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"Az aktuális bemenet lekérése meghiúsult a(z) „%s” eszközről. Lehet, hogy ez "
"egy rádióeszköz."
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "A(z) %d. bemenet beállítása meghiúsult a(z) „%s” eszközön."
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "A(z) %d. bemenet beállítása meghiúsult a(z) „%s” eszközön."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Nem állíthatók sorba a pufferek a(z) „%s” eszközben."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Nem sikerült videokockákat lekérni a(z) „%s” eszköztől."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Hiba %d próbálkozás után. Eszköz: %s. Rendszerhiba: %s."
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Nem kérhetők le a(z) „%s” eszköz paraméterei"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"A videobemeneti eszköz nem fogadta el az új képkockasebesség-beállítást."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Nem képezhetők le a(z) „%s” eszköz pufferei"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"A(z) „%s” eszköz illesztőprogramja nem támogat egyetlen ismert felvételi "
-"módot sem."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "A felbontás módosítása futás közben még nem támogatott."
msgid "Cannot operate without a clock"
msgstr "Óra nélkül lehetetlen a működés"
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Hiba %d bájt olvasásakor a következő eszközről: „%s”."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Nem állíthatók sorba a pufferek a(z) „%s” eszközben."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nem hozható létre kapcsolat a hangkiszolgálóhoz"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "A hangkiszolgáló képességeinek lekérdezése meghiúsult"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-02-26 18:09+0700\n"
"Last-Translator: Andhika Padmawan <andhika.padmawan@gmail.com>\n"
"Language-Team: Indonesian <translation-team-id@lists.sourceforge.net>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Tak dapat membangun koneksi ke server suara"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Gagal untuk kueri kemampuan server suara"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "'%s' oleh '%s'"
-msgid "Internal data stream error."
-msgstr "Galat arus data internal."
-
msgid "Failed to decode JPEG image"
msgstr "Gagal untuk mengawasandi citra JPEG"
+msgid "Internal data stream error."
+msgstr "Galat arus data internal."
+
msgid "Could not connect to server"
msgstr "Tak dapat menghubungi server"
msgstr "Galat membaca %d bita dari divais '%s'."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+"Gagal menyebutkan kemungkinan format video yang dapat bekerja dengan divais "
+"'%s'."
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Tak dapat memetakan penyangga dari divais '%s'"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Driver dari divais '%s' tak mendukung metode penangkap apapun yang dikenal."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Driver dari divais '%s' tak mendukung metode penangkap apapun yang dikenal."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Divais '%s' bukan divais keluaran."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Divais '%s' bukan divais keluaran."
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Divais '%s' tak dapat menangkap di %dx%d"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Divais '%s' tak dapat menangkap dalam format yang ditentukan"
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Tak bisa mendapatkan parameter di divais '%s'"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "Divais masukan video tak menerima pengaturan rasio bingkai baru."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Mendapatkan ukuran bingkai %u ketimbang %u yang tak diharapkan."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Galat membaca %d bita pada divais '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Gagal mencoba untuk mendapatkan bingkai video dari divais '%s'."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Gagal setelah %d percobaan. divais %s. galat sistem: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Gagal mendapatkan pengaturan tuner %d di divais '%s'."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Galat membaca %d bita dari divais '%s'."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Divais '%s' bukan divais keluaran."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Gagal mengatur masukan %d di divais %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Gagal mengatur masukan %d di divais %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Gagal mendapatkan kekuatan sinyal untuk divais '%s'."
#, c-format
msgid ""
"Gagal mendapatkan masukan terkini di divais '%s'. Mungkin itu divais radio"
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Gagal mengatur masukan %d di divais %s."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Gagal mengatur keluaran %d di divais %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Tak dapat mengantrekan penyangga di divais '%s'."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Gagal mencoba untuk mendapatkan bingkai video dari divais '%s'."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Gagal setelah %d percobaan. divais %s. galat sistem: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Tak bisa mendapatkan parameter di divais '%s'"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Divais masukan video tak menerima pengaturan rasio bingkai baru."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Tak dapat memetakan penyangga dari divais '%s'"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Driver dari divais '%s' tak mendukung metode penangkap apapun yang dikenal."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Mengubah resolusi saat waktu berjalan belum didukung."
msgid "Cannot operate without a clock"
msgstr "Tak dapat beroperasi tanpa jam"
-#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
-#~ msgstr ""
-#~ "Gagal menyebutkan kemungkinan format video yang dapat bekerja dengan "
-#~ "divais '%s'."
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Galat membaca %d bita pada divais '%s'."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Tak dapat mengantrekan penyangga di divais '%s'."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Tak dapat membangun koneksi ke server suara"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Gagal untuk kueri kemampuan server suara"
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ msgid "No free buffers found in the pool at index %d."
#~ msgstr "Tak ada penyangga bebas ditemukan di lubuk pada indeks %d."
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "Divais '%s' tak dapat menangkap di %dx%d"
-
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "Divais '%s' tak dapat menangkap dalam format yang ditentukan"
-
#~ msgid "Could not get buffers from device '%s'."
#~ msgstr "Tak bisa mendapatkan penyangga dari divais '%s'."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2010-10-25 10:11+0200\n"
"Last-Translator: Luca Ferretti <elle.uca@infinito.it>\n"
"Language-Team: Italian <tp@lists.linux.it>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Impossibile stabilire la connessione al server audio"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Interrogazione delle funzionalità del server audio non riuscita"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "«%s» di «%s»"
-msgid "Internal data stream error."
-msgstr "Errore interno nello stream dei dati."
-
msgid "Failed to decode JPEG image"
msgstr "Decodifica dell'immagine JPEG non riuscita"
+msgid "Internal data stream error."
+msgstr "Errore interno nello stream dei dati."
+
msgid "Could not connect to server"
msgstr "Impossibile connettersi al server"
msgstr "Errore nel leggere %d byte dal device «%s»."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Impossibile mappare dei buffer dal device «%s»."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "Il driver del device «%s» non supporta alcun metodo di cattura noto."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "Il driver del device «%s» non supporta alcun metodo di cattura noto."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Il device «%s» non è un dispositivo di uscita."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Il device «%s» non è un dispositivo di uscita."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Il device «%s» non è un dispositivo di cattura."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Il device «%s» non è un dispositivo di cattura."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Impossibile ottenere i parametri sul device «%s»"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Il device di ingresso video non accetta la nuova impostazione sul frame rate."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Ottenuta dimensione inattesa del fotogramma: %u invece di %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Errore nel leggere %d byte sul device «%s»."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Tentativo di ottenere fotogrammi video dal device «%s» non riuscito."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Fallito dopo %d tentativi. Device %s. Errore di sistema: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr ""
+"Recupero delle impostazioni del sintonizzatore %d sul device «%s» non "
+"riuscito."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Errore nel leggere %d byte dal device «%s»."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Il device «%s» non è un dispositivo di uscita."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Impostazione dell'ingresso %d sul device «%s» non riuscita."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Impostazione dell'ingresso %d sul device «%s» non riuscita."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Recupero dell'intensità del segnale per il device «%s» non riuscito."
#, c-format
msgid ""
"Recupero dell'attuale ingresso sul device «%s» non riuscito. Forse è un "
"dispositivo radio"
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Impostazione dell'ingresso %d sul device «%s» non riuscita."
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "Impostazione dell'ingresso %d sul device «%s» non riuscita."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Impossibile accodare i buffer nel device «%s»."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Tentativo di ottenere fotogrammi video dal device «%s» non riuscito."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Fallito dopo %d tentativi. Device %s. Errore di sistema: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Impossibile ottenere i parametri sul device «%s»"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Il device di ingresso video non accetta la nuova impostazione sul frame rate."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Impossibile mappare dei buffer dal device «%s»."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "Il driver del device «%s» non supporta alcun metodo di cattura noto."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
"Il cambio della risoluzione durante l'esecuzione non è ancora supportato."
msgid "Cannot operate without a clock"
msgstr "Impossibile operare senza un clock"
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Errore nel leggere %d byte sul device «%s»."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Impossibile accodare i buffer nel device «%s»."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Impossibile stabilire la connessione al server audio"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Interrogazione delle funzionalità del server audio non riuscita"
#
# Takao Fujiwara <takao.fujiwara@sun.com>, 2006.
# Makoto Kato <makoto.kt@gmail.com>, 2009-2011.
-# Takeshi Hamasaki <hmatrjp@users.sourceforge.jp>, 2011
msgid ""
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-12-10 02:18+0000\n"
-"PO-Revision-Date: 2011-08-28 23:59+0900\n"
-"Last-Translator: Takeshi Hamasaki <hmatrjp@users.sourceforge.jp>\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
+"PO-Revision-Date: 2011-04-26 20:15+0900\n"
+"Last-Translator: Makoto Kato <makoto.kt@gmail.com>\n"
"Language-Team: Japanese <translation-team-ja@lists.sourceforge.net>\n"
"Language: ja\n"
"MIME-Version: 1.0\n"
"X-Generator: Lokalize 0.3\n"
"Plural-Forms: nplurals=1; plural=0;\n"
-msgid "Could not establish connection to sound server"
-msgstr "サウンドサーバーへの接続に失敗しました"
-
-msgid "Failed to query sound server capabilities"
-msgstr "サウンドサーバーのケイパビリティのクエリーに失敗しました"
-
#. TRANSLATORS: 'song title' by 'artist name'
-#, fuzzy, c-format
+#, c-format
msgid "'%s' by '%s'"
-msgstr "対"
-
-msgid "Internal data stream error."
-msgstr "内部データストリームエラー"
+msgstr ""
msgid "Failed to decode JPEG image"
msgstr "JPEG画像のデコードに失敗しました"
+msgid "Internal data stream error."
+msgstr "内部データストリームエラー"
+
msgid "Could not connect to server"
msgstr "サーバーへ接続できません"
"A network error occured, or the server closed the connection unexpectedly."
msgstr "ネットワークエラーが発生したか、サーバーが予期せず接続を閉じました。"
-#, fuzzy
msgid "Server sent bad data."
-msgstr "データストリーム (サーバープッシュ型)"
+msgstr ""
msgid "No URL set."
msgstr "URLが指定されていません。"
"サポートしているストリームが見つかりません。Real メディアストリームのための"
"GStreamer RTSP 拡張プラグインをインストールする必要があるかもしれません。"
+#, fuzzy
msgid ""
"No supported stream was found. You might need to allow more transport "
"protocols or may otherwise be missing the right GStreamer RTSP extension "
"plugin."
msgstr ""
-"サポートしているストリームが見つかりません。別の転送プロトコルをインストール"
-"する必要があるかもしれません。または、正しい GStreamer RTSP 拡張プラグインが"
-"ないのかもしれません。"
+"サポートしているストリームが見つかりません。正しい GStreamer RTSP 拡張プラグ"
+"インをインストールする必要があるかもしれません。"
msgid "Internal data flow error."
msgstr "内部データフローエラー。"
msgid "AUX Out"
msgstr "AUX出力"
-#, fuzzy
msgid "3D Depth"
-msgstr "最大深度: "
+msgstr ""
msgid "3D Center"
msgstr "3Dセンター"
-#, fuzzy
msgid "3D Enhance"
-msgstr "色を強調しています"
+msgstr ""
msgid "Telephone"
msgstr "電話"
msgstr "ループバック"
msgid "Diagnostic"
-msgstr "診断"
+msgstr ""
msgid "Bass Boost"
msgstr "低音ブースト"
msgid "Keyboard Beep"
msgstr "キーボードビープ音"
-#, fuzzy
msgid "Simulate Stereo"
-msgstr "彫金を表現します"
+msgstr ""
msgid "Stereo"
msgstr "ステレオ"
msgid "Mute"
msgstr "ミュート"
-#, fuzzy
msgid "Fast"
-msgstr "速い"
+msgstr ""
#. TRANSLATORS: "Very Low" is a quality setting here
-#, fuzzy
msgid "Very Low"
-msgstr "低品位"
+msgstr ""
#. TRANSLATORS: "Low" is a quality setting here
-#, fuzzy
msgid "Low"
-msgstr "低音"
+msgstr ""
#. TRANSLATORS: "Medium" is a quality setting here
-#, fuzzy
msgid "Medium"
-msgstr "中音"
+msgstr ""
#. TRANSLATORS: "High" is a quality setting here
-#, fuzzy
msgid "High"
-msgstr "高音"
+msgstr ""
#. TRANSLATORS: "Very High" is a quality setting here
-#, fuzzy
msgid "Very High"
-msgstr "高品位"
+msgstr ""
#. TRANSLATORS: "Production" is a quality setting here
-#, fuzzy
msgid "Production"
-msgstr "プロダクション"
+msgstr ""
msgid "Front Panel Microphone"
msgstr "フロントパネルのマイクロフォン"
msgid "Yellow Front Panel Connector"
msgstr ""
-#, fuzzy
msgid "Spread Output"
-msgstr "情報出力:\n"
+msgstr ""
msgid "Downmix"
msgstr "ダウンミックス"
msgstr "仮想ミキサーチャンネル"
#. TRANSLATORS: name + number of a volume mixer control
-#, fuzzy, c-format
+#, c-format
msgid "%s %d Function"
-msgstr "機能"
+msgstr ""
#. TRANSLATORS: name of a volume mixer control
-#, fuzzy, c-format
+#, c-format
msgid "%s Function"
-msgstr "機能"
+msgstr ""
+#, fuzzy
msgid ""
"Could not open audio device for playback. This version of the Open Sound "
"System is not supported by this element."
msgstr ""
-"å\86\8dç\94\9fç\94¨ã\81«ã\82ªã\83¼ã\83\87ã\82£ã\82ªã\83\87ã\83\90ã\82¤ã\82¹ã\82\92é\96\8bã\81\8fã\81\93ã\81¨ã\81\8cã\81§ã\81\8dã\81¾ã\81\9bã\82\93ã\81§ã\81\97ã\81\9fã\80\82ã\81\93ã\81®ã\83\90ã\83¼ã\82¸ã\83§ã\83³ã\81® "
-"Open Sound System は、このエレメントによってサポートされていません。"
+"å\86\8dç\94\9fç\94¨ã\81«ã\82ªã\83¼ã\83\87ã\82£ã\82ªã\83\87ã\83\90ã\82¤ã\82¹ã\82\92é\96\8bã\81\8fã\81\93ã\81¨ã\81\8cã\81§ã\81\8dã\81¾ã\81\9bã\82\93ã\81§ã\81\97ã\81\9fã\80\82ã\83\87ã\83\90ã\82¤ã\82¹ã\82\92é\96\8bã\81\8fã\81\9fã\82\81ã\81®"
+"権限がありません。"
msgid "Playback is not supported by this audio device."
-msgstr "再生はこのオーディオデバイスではサポートされていません。"
+msgstr ""
msgid "Audio playback error."
-msgstr "オーディオ再生エラーです。"
+msgstr ""
msgid "Recording is not supported by this audio device."
msgstr "このオーディオデバイスによって録音はサポートされていません。"
+#, fuzzy
msgid "Error recording from audio device."
-msgstr "ã\82ªã\83¼ã\83\87ã\82£ã\82ªã\83\87ã\83\90ã\82¤ã\82¹ã\81\8bã\82\89ã\81®é\8c²é\9f³時にエラーが発生しました。"
+msgstr "ã\83\87ã\83\90ã\82¤ã\82¹ %2$s ã\81\8bã\82\89ã\80\80%1$d ã\83\90ã\82¤ã\83\88èªã\81¿è¾¼ã\81¿時にエラーが発生しました。"
# SUN REVIEWED
msgid "Gain"
msgstr "デバイス %2$s から %1$d バイト読み込み時にエラーが発生しました。"
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "デバイス '%s' からバッファをマップできません。"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"デバイス '%s' のドライバーはよく知られたキャプチャーメソッドをサポートしてい"
+"ません"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"デバイス '%s' のドライバーはよく知られたキャプチャーメソッドをサポートしてい"
+"ません"
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "デバイス '%s' は出力デバイスではありません"
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "デバイス '%s' は出力デバイスではありません"
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "デバイス '%s' は %dx%d でキャプチャできません"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "デバイス '%s' は指定されたフォーマットでキャプチャできません"
+
+# SUN REVIEWED
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "デバイス '%s' 上のパラメータを取得できません。"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "ビデオ入力デバイスは新しいフレームレート設定にすることができません"
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "予期しないフレームサイズ (%2$u ではなく %1$u) を取得しました。"
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "デバイス '%2$s' 上で %1$d バイトの読み込み時にエラーが発生しました。"
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "デバイス '%s' からビデオフレームの取得に失敗しました。"
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "%d 回リトライしましたが失敗しました。デバイス %s。システムエラー: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "デバイス '%2$s' 上のチューナー %1$d の設定の取得に失敗しました。"
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "デバイス %2$s から %1$d バイト読み込み時にエラーが発生しました。"
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "デバイス '%s' は出力デバイスではありません"
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "デバイス '%2$s' 上のチューナー %1$d の設定の取得に失敗しました。"
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "デバイス '%s' の現在のチューナーの周波数の取得に失敗しました"
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"it is a v4l1 driver."
msgstr ""
-"デバイス '%s' ケイパビリティの取得時にエラーが発生しました: v4l2 ドライバでは"
-"ありません。v4l1 ドライバでないか、チェックしてください。"
+"デバイス '%s' ケイパビリティの取得時にエラーが発生しました: v412 ドライバでは"
+"ありません。もし v411 ドライバであれば、チェックしてください。"
#, c-format
msgid "Failed to query attributes of input %d in device %s"
msgid "Failed to get setting of tuner %d on device '%s'."
msgstr "デバイス '%2$s' 上のチューナー %1$d の設定の取得に失敗しました。"
-#, fuzzy, c-format
+#, c-format
msgid "Failed to query norm on device '%s'."
-msgstr "デバイス '%2$s' 上のチューナー %1$d の設定の取得に失敗しました。"
+msgstr ""
#, c-format
msgid "Failed getting controls attributes on device '%s'."
msgstr "デバイス '%s' のコントロール属性の取得に失敗しました"
-#, fuzzy, c-format
+#, c-format
msgid "Cannot identify device '%s'."
-msgstr "ミキサーデバイス'%s'を開けません"
+msgstr ""
#, c-format
msgid "This isn't a device '%s'."
msgid "Device '%s' is not a output device."
msgstr "デバイス '%s' は出力デバイスではありません"
-#, fuzzy, c-format
+#, c-format
msgid "Failed to set norm for device '%s'."
msgstr ""
-"デバイス '%3$s' 上のコントロール %2$d の値を %1$d へ設定することに失敗しまし"
-"た。"
#, c-format
msgid "Failed to get current tuner frequency for device '%s'."
msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
msgstr "デバイス '%s' のチューナーの周波数の %lu Hz への設定に失敗しました"
-#, fuzzy, c-format
+#, c-format
msgid "Failed to get signal strength for device '%s'."
-msgstr "デバイス '%s' の現在のチューナーの周波数の取得に失敗しました"
+msgstr ""
#, c-format
msgid "Failed to get value for control %d on device '%s'."
"ん"
#, fuzzy, c-format
-msgid "Failed to set input %d on device %s."
-msgstr ""
-"デバイス '%3$s' 上のコントロール %2$d の値を %1$d へ設定することに失敗しまし"
-"た。"
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
-"ã\83\87ã\83\90ã\82¤ã\82¹ '%s' ä¸\8aã\81®ç\8f¾å\9c¨ã\81®å\87º力の取得に失敗しました。ラジオデバイスかもしれませ"
+"ã\83\87ã\83\90ã\82¤ã\82¹ '%s' ä¸\8aã\81®ç\8f¾å\9c¨ã\81®å\85¥力の取得に失敗しました。ラジオデバイスかもしれませ"
"ん"
-#, c-format
-msgid "Failed to set output %d on device %s."
-msgstr "出力%dをデバイス %s に設定できませんでした。"
-
#, fuzzy, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "デバイス '%s' からバッファをマップできません。"
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "デバイス '%s' からビデオフレームの取得に失敗しました。"
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "%d 回リトライしましたが失敗しました。デバイス %s。システムエラー: %s"
-
-# SUN REVIEWED
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "デバイス '%s' 上のパラメータを取得できません。"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "ビデオ入力デバイスは新しいフレームレート設定にすることができません"
+msgid "Failed to set output %d on device %s."
+msgstr "デバイス '%2$s' 上のチューナー %1$d の設定の取得に失敗しました。"
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "デバイス '%s' からバッファをマップできません。"
+msgid "Changing resolution at runtime is not yet supported."
+msgstr ""
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
+msgid "Cannot operate without a clock"
msgstr ""
-"デバイス '%s' のドライバーはよく知られたキャプチャーメソッドをサポートしてい"
-"ません"
-msgid "Changing resolution at runtime is not yet supported."
-msgstr "実行時の解像度変更はサポートされていません。"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr ""
+#~ "デバイス '%2$s' 上で %1$d バイトの読み込み時ににエラーが発生しました。"
-msgid "Cannot operate without a clock"
-msgstr "クロックなしでは動作できません。"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "サウンドサーバーへの接続に失敗しました"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "サウンドサーバーのケイパビリティのクエリーに失敗しました"
#~ msgid ""
#~ "Failed trying to get video frames from device '%s'. Not enough memory."
#~ msgid "No free buffers found in the pool at index %d."
#~ msgstr "インデックス %d でプール内に空きバッファがありません。"
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "デバイス '%s' は %dx%d でキャプチャできません"
-
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "デバイス '%s' は指定されたフォーマットでキャプチャできません"
-
#~ msgid "Could not get buffers from device '%s'."
#~ msgstr "デバイス '%s' からバッファを取得できませんでした。"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.23.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2010-07-16 19:34+0300\n"
"Last-Translator: Žygimantas Beručka <uid0@akl.lt>\n"
"Language-Team: Lithuanian <komp_lt@konferencijos.lt>\n"
"%100<10 || n%100>=20) ? 1 : 2);\n"
"X-Generator: Virtaal 0.6.1\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nepavyko prisijungti prie garso serverio"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Nepavyko užklausti garso serverio galimybių"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "„%s“ atlieka „%s“"
-msgid "Internal data stream error."
-msgstr "Vidinė duomenų srauto klaida."
-
msgid "Failed to decode JPEG image"
msgstr "Nepavyko dekoduoti JPEG paveikslėlio"
+msgid "Internal data stream error."
+msgstr "Vidinė duomenų srauto klaida."
+
msgid "Could not connect to server"
msgstr "Nepavyko prisijungti prie serverio"
msgstr "Klaida skaitant %d baitus (-ų) iš įrenginio „%s“."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Nepavyko išdėstyti buferių iš įrenginio „%s“"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "Įrenginio „%s“ tvarkyklė nepalaiko jokių žinomų įrašymo būdų."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "Įrenginio „%s“ tvarkyklė nepalaiko jokių žinomų įrašymo būdų."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Įrenginys „%s“ nėra išvesties įrenginys."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Įrenginys „%s“ nėra išvesties įrenginys."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Įrenginys „%s“ nėra įrašymo įrenginys."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Įrenginys „%s“ nėra įrašymo įrenginys."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Nepavyko įrenginio „%s“ parametrų"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "Vaizdo įvesties įrenginys nepriėmė naujų kadrų dažnio nustatymų."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Gautas netikėtas kadro dydis %u vietoje %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Klaida skaitant %d baitus (-ų) įrenginyje „%s“."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Nepavyko gauti vaizdo kadrų iš įrenginio „%s“."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Nepavyko po %d bandymų. Įrenginys %s. Sistemos klaida: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Nepavyko gauti derintuvo %d įrenginyje „%s“ parametrų."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Klaida skaitant %d baitus (-ų) iš įrenginio „%s“."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Įrenginys „%s“ nėra išvesties įrenginys."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Nepavyko nustatyti įvesties %d įrenginyje %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Nepavyko nustatyti įvesties %d įrenginyje %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Nepavyko gauti įrenginio „%s“ signalo stiprumo."
#, c-format
msgid ""
"Nepavyko gauti dabartinės įvesties įrenginyje „%s“. Galbūt tai radijo "
"įrenginys"
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Nepavyko nustatyti įvesties %d įrenginyje %s."
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "Nepavyko nustatyti įvesties %d įrenginyje %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Nepavyko sustatyti į eilė buferių įrenginyje „%s“."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Nepavyko gauti vaizdo kadrų iš įrenginio „%s“."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Nepavyko po %d bandymų. Įrenginys %s. Sistemos klaida: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Nepavyko įrenginio „%s“ parametrų"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Vaizdo įvesties įrenginys nepriėmė naujų kadrų dažnio nustatymų."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Nepavyko išdėstyti buferių iš įrenginio „%s“"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "Įrenginio „%s“ tvarkyklė nepalaiko jokių žinomų įrašymo būdų."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Galimybės keisti skiriamąją gebą operacijos vykdymo metu dar nėra."
msgid "Cannot operate without a clock"
msgstr "Negali veikti be laikrodžio"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Klaida skaitant %d baitus (-ų) įrenginyje „%s“."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Nepavyko sustatyti į eilė buferių įrenginyje „%s“."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nepavyko prisijungti prie garso serverio"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Nepavyko užklausti garso serverio galimybių"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Aprašo pasirinktą įvesties elementą."
#
# Arvis Lācis <arvis.lacis@inbox.lv>, 2009.
# Rihards Prieditis <rprieditis@gmail.com>, 2010.
-# Rihards Prieditis <rprieditis@gmail.com>, 2011.
msgid ""
msgstr ""
-"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
+"Project-Id-Version: gst-plugins-good 0.10.23.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-12-10 02:18+0000\n"
-"PO-Revision-Date: 2011-09-02 11:23-0000\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
+"PO-Revision-Date: 2010-07-07 11:53+0100\n"
"Last-Translator: Rihards Priedītis <rprieditis@gmail.com>\n"
"Language-Team: Latvian <translation-team-lv@lists.sourceforge.net>\n"
"Language: lv\n"
"2);\n"
"X-Generator: Lokalize 1.0\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nevar izveidot savienojumu ar skaņas serveri"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Neizdevās noskaidrot skaņas servera iespējas"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "\"%s\"ko \"%s\""
-msgid "Internal data stream error."
-msgstr "Iekšējās datu plūsmas kļūda."
-
msgid "Failed to decode JPEG image"
msgstr "Neizdevās atkodēt JPEG attēlu"
+msgid "Internal data stream error."
+msgstr "Iekšējās datu plūsmas kļūda."
+
msgid "Could not connect to server"
msgstr "Nevar savienoties ar serveri"
msgid "Server does not support seeking."
-msgstr "Serveris neatbalsta meklēšanu."
+msgstr ""
+#, fuzzy
msgid "Could not resolve server name."
-msgstr "Nevar atpazīt servera nosaukumu."
+msgstr "Nevar savienoties ar serveri"
+#, fuzzy
msgid "Could not establish connection to server."
-msgstr "Nevar izveidot savienojumu ar serveri."
+msgstr "Nevar izveidot savienojumu ar skaņas serveri"
msgid "Secure connection setup failed."
-msgstr "Drošā savienojuma izveidošana neizdevās."
+msgstr ""
msgid ""
"A network error occured, or the server closed the connection unexpectedly."
-msgstr "Notika tīkla kļūda, vai serveris negaidīti aizvēra savienojumu."
+msgstr ""
msgid "Server sent bad data."
-msgstr "Serveris nosūtija sliktus datus."
+msgstr ""
msgid "No URL set."
-msgstr "URL nav uzstādīts."
+msgstr ""
msgid "No or invalid input audio, AVI stream will be corrupt."
msgstr "Nav neviena vai nederīgs ievades audio, AVI straume tiks bojāta."
msgstr "Šis fails ir bojāts un nevar tikt atskaņots."
msgid "Invalid atom size."
-msgstr "Nederīgs atoma izmērs."
+msgstr ""
msgid "This file is incomplete and cannot be played."
msgstr "Šis pails ir nepabeigts un nevar tikt atskaņots."
msgstr "Radās kļūda nolasot %d baitus no ierīces \"%s\"."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Nevar izvietot buferus no ierīces \"%s\""
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Ierīces \"%s\" draiveris neatbalsta nevienu zināmo ierakstīšanas metodi."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Ierīces \"%s\" draiveris neatbalsta nevienu zināmo ierakstīšanas metodi."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Ierīce \"%s\" nav izvades ierīce."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Ierīce \"%s\" nav izvades ierīce."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Ierīce \"%s\" nav ierakstīšanas ierīce."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Ierīce \"%s\" nav ierakstīšanas ierīce."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Nevar nolasīt parametrus no ierīces \"%s\""
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "Video ievades ierīce nepieņem jaunos kadra frekvences uzstādījumus."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Saņēmu negaidītu kadra izmēru %u, nevis %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Neizdevās saņemt video kadrus no ierīces \"%s\"."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Neveiksme pēc %d mēģinājumiem. ierīce %s. sistēmas kļūda: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Neizdevās saņemt skaņotāja %d uzstādījumus no ierīces \"%s\"."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Radās kļūda nolasot %d baitus no ierīces \"%s\"."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Ierīce \"%s\" nav izvades ierīce."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Neizdevās uzstādīt ievadi %d uz ierīces %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Neizdevās uzstādīt ievadi %d uz ierīces %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Neizdevās saņemt signāla stiprumu no ierīces \"%s\"."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"Neizdevās saņemt pašreizējo ievadi no ierīces \"%s\". Iespējams tā ir radio "
"ierīce"
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Neizdevās uzstādīt ievadi %d uz ierīces %s."
-
-#, c-format
+#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
-"Neizdevās saņemt pašreizējo izvadu no ierīces \"%s\". Iespējams tā ir radio "
+"Neizdevās saņemt pašreizējo ievadi no ierīces \"%s\". Iespējams tā ir radio "
"ierīce"
-#, c-format
+#, fuzzy, c-format
msgid "Failed to set output %d on device %s."
-msgstr "Neizdevās uzstādīt izvadu %d uz ierīces %s."
-
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Nevar ierindod buferus ierīcē \"%s\"."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Neizdevās saņemt video kadrus no ierīces \"%s\"."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Neveiksme pēc %d mēģinājumiem. ierīce %s. sistēmas kļūda: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Nevar nolasīt parametrus no ierīces \"%s\""
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Video ievades ierīce nepieņem jaunos kadra frekvences uzstādījumus."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Nevar izvietot buferus no ierīces \"%s\""
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Ierīces \"%s\" draiveris neatbalsta nevienu zināmo ierakstīšanas metodi."
+msgstr "Neizdevās uzstādīt ievadi %d uz ierīces %s."
msgid "Changing resolution at runtime is not yet supported."
msgstr "Izšķirtspējas mainīšana izpildlaikā pašalik netiek atbalstīta."
msgid "Cannot operate without a clock"
msgstr "Nevar veikt darbības bez pulksteņa"
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Radās kļūda nolasot %d baitus no ierīces \"%s\"."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Nevar ierindod buferus ierīcē \"%s\"."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nevar izveidot savienojumu ar skaņas serveri"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Neizdevās noskaidrot skaņas servera iespējas"
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.10.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2008-10-26 19:09+0100\n"
"Last-Translator: Michel Bugeja <michelbugeja@rabatmalta.com>\n"
"Language-Team: Maltese <translation-team-mt@lists.sourceforge.net>\n"
"X-Poedit-Language: Maltese\n"
"X-Poedit-Country: MALTA\n"
-msgid "Could not establish connection to sound server"
-msgstr "Kuntatt mas-sound server ma ġiex stabbilit"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Mhux possibli t-tfittxija għall-kapaċita tas-sound server"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
-msgid "Internal data stream error."
-msgstr "Problema interna fid-data stream"
-
msgid "Failed to decode JPEG image"
msgstr "Problem fid-decoding tal-istampa JPEG"
+msgid "Internal data stream error."
+msgstr "Problema interna fid-data stream"
+
msgid "Could not connect to server"
msgstr "Mhux possibli naqbad mas-server"
msgstr "Problema fil-qar()i %d bytes fid-device '%s'."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr "Failed to enumerate possible video formats device '%s' can work with"
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Ma nistax nqabbel buffers mill-apparat '%s'."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Id-driver tal-apparat '%s' ma jissapportja l-ebda capture method mifhum."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Id-driver tal-apparat '%s' ma jissapportja l-ebda capture method mifhum."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Device '%s' mhux capture device."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Device '%s' mhux capture device."
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Appart '%s' ma jistax jagħmel capture bi %dx%d"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Appart '%s' ma jistax jagħmel capture f'dan il-format"
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Mhux possibli nġib parametri tal-apparat '%s'"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Apparat li jaqra video input ma aċċettax is-settings ġodda tal-frame rate."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Irċivejt frame size ta' %u minflok kif kien mistenni ta' %u."
+#, c-format
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Problema biex inġib video frames mill-apparat '%s'."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Problema wara li pruvajt %d drabi. apparat %s. żball fis-sistema: %s"
+
#, fuzzy, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Problema biex inġib settings tat-tuner %d fuq l-appart '%s'"
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Problema fil-qar()i %d bytes fid-device '%s'."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Device '%s' mhux capture device."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Problema biex nissettja input %d fuq apparat %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Problema biex nissettja input %d fuq apparat %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Problema biex inġib signal strength fuq l-apparat '%s'."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"Problema biex inġib current input fuq apparat '%s'. Jista jkun li huwa "
"apparat tar-radju."
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Problema biex nissettja input %d fuq apparat %s."
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "Problema biex nissettja input %d fuq apparat %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Could not enqueue buffers in device '%s'."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Problema biex inġib video frames mill-apparat '%s'."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Problema wara li pruvajt %d drabi. apparat %s. żball fis-sistema: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Mhux possibli nġib parametri tal-apparat '%s'"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Apparat li jaqra video input ma aċċettax is-settings ġodda tal-frame rate."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Ma nistax nqabbel buffers mill-apparat '%s'."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Id-driver tal-apparat '%s' ma jissapportja l-ebda capture method mifhum."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Ma tistax tibdel ir-resolution waqt runtime."
msgid "Cannot operate without a clock"
msgstr "Ma nistax nħaddem mingħajr arloġġ"
+#, fuzzy
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Problema fil-qar()i %d bytes fid-device '%s'."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Could not enqueue buffers in device '%s'."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Kuntatt mas-sound server ma ġiex stabbilit"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Mhux possibli t-tfittxija għall-kapaċita tas-sound server"
+
#~ msgid "Error stopping streaming capture from device '%s'."
#~ msgstr "Problem biex inwaqqaf streaming capture mill-apparat '%s'."
#~ msgid "Could not get buffers from device '%s'."
#~ msgstr "Ma nistax nirċievi buffers mill-apparat '%s'."
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "Appart '%s' ma jistax jagħmel capture f'dan il-format"
-
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "Appart '%s' ma jistax jagħmel capture bi %dx%d"
-
#~ msgid "No free buffers found in the pool at index %d."
#~ msgstr "Ma nstab l-ebda buffer free fl-indiċi %d tal-pool."
#~ "buffers have been allocated yet, or the userptr or length are invalid. "
#~ "device %s"
-#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
-#~ msgstr ""
-#~ "Failed to enumerate possible video formats device '%s' can work with"
-
#~ msgid "Failed getting controls attributes on device '%s.'"
#~ msgstr "L-attributi ta' kontroll ta' device '%s' ma nqrawx."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2010-10-24 21:53+0200\n"
"Last-Translator: Kjartan Maraas <kmaraas@gnome.org>\n"
"Language-Team: Norwegian Bokmaal <i18n-nb@lister.ping.uio.no>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Kunne ikke etablere tilkobling til lydtjener"
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "«%s» av «%s»"
-msgid "Internal data stream error."
-msgstr "Intern feil i datastrøm."
-
msgid "Failed to decode JPEG image"
msgstr "Klarte ikke å dekode JPEG-bilde"
+msgid "Internal data stream error."
+msgstr "Intern feil i datastrøm."
+
msgid "Could not connect to server"
msgstr "Kunne ikke koble til tjener."
msgstr "Kunne ikke lukke VFS-fil «%s»."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Kunne ikke lukke VFS-fil «%s»."
+
+#, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+
+#, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Enhet «%s» kan ikke fange data."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Enhet «%s» kan ikke fange data."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Enhet «%s» kan ikke fange data."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Enhet «%s» kan ikke fange data."
+
+#, fuzzy, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Kunne ikke lukke VFS-fil «%s»."
+
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr ""
+#, c-format
+msgid "Failed trying to get video frames from device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr ""
+
#, fuzzy, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Kunne ikke lukke VFS-fil «%s»."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Kunne ikke lukke VFS-fil «%s»."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Enhet «%s» kan ikke fange data."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Kunne ikke lukke VFS-fil «%s»."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
msgstr "Kunne ikke lukke VFS-fil «%s»."
#, c-format
msgstr ""
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr ""
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Kunne ikke lukke VFS-fil «%s»."
-#, fuzzy, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Kunne ikke lukke VFS-fil «%s»."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr ""
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Kunne ikke lukke VFS-fil «%s»."
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Kunne ikke lukke VFS-fil «%s»."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
msgid "Cannot operate without a clock"
msgstr "Kan ikke operere uten en klokke"
+#, fuzzy
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Kunne ikke lukke VFS-fil «%s»."
+
+#, fuzzy
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Kunne ikke lukke VFS-fil «%s»."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Kunne ikke etablere tilkobling til lydtjener"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Beskriver valgt inndataelement."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-04-27 00:05+0200\n"
"Last-Translator: Freek de Kruijf <f.de.kruijf@gmail.com>\n"
"Language-Team: Dutch <vertaling@vrijschrift.org>\n"
"X-Generator: Lokalize 1.1\n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"
-msgid "Could not establish connection to sound server"
-msgstr "Kan geen verbinding maken met de geluidsserver"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Kan de eigenschappen van de geluidsserver niet opvragen"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "'%s' door '%s'"
-msgid "Internal data stream error."
-msgstr "Interne fout in gegevensstroom."
-
msgid "Failed to decode JPEG image"
msgstr "Kan de JPEG-afbeelding niet decoderen"
+msgid "Internal data stream error."
+msgstr "Interne fout in gegevensstroom."
+
msgid "Could not connect to server"
msgstr "Kan geen verbinding maken met server"
msgstr "Fout bij het lezen van %d bytes van apparaat '%s'."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Kan de buffers van apparaat '%s' niet vinden"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Het stuurprogramma van apparaat '%s' ondersteunt geen bekende opnamemethode."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Het stuurprogramma van apparaat '%s' ondersteunt geen bekende opnamemethode."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Apparaat '%s' is geen uitvoerapparaat."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Apparaat '%s' is geen uitvoerapparaat."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Apparaat '%s' is geen opnameapparaat."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Apparaat '%s' is geen opnameapparaat."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Kan de parameters op apparaat '%s' niet verkrijgen"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Invoerapparaat voor video heeft de nieuwe frame-snelheid niet geaccepteerd."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Onverwachte framegrootte, %u in plaats van %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Kan geen videoframes verkrijgen van apparaat '%s'."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Fout na %d pogingen. Apparaat %s. Systeemfout: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Kan de instelling van tuner %d op apparaat '%s' niet verkrijgen."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Fout bij het lezen van %d bytes van apparaat '%s'."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Apparaat '%s' is geen uitvoerapparaat."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Kan invoer %d op apparaat '%s' niet instellen."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Kan invoer %d op apparaat '%s' niet instellen."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Kan de signaalsterkte van apparaat '%s' niet verkrijgen."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"mogelijk een radio"
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Kan invoer %d op apparaat '%s' niet instellen."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Kan uitvoer %d op apparaat %s niet instellen."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Kan geen buffers toekennen in apparaat '%s'."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Kan geen videoframes verkrijgen van apparaat '%s'."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Fout na %d pogingen. Apparaat %s. Systeemfout: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Kan de parameters op apparaat '%s' niet verkrijgen"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Invoerapparaat voor video heeft de nieuwe frame-snelheid niet geaccepteerd."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Kan de buffers van apparaat '%s' niet vinden"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Het stuurprogramma van apparaat '%s' ondersteunt geen bekende opnamemethode."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Tijdens het draaien kan de resolutie nog niet gewijzigd worden."
msgid "Cannot operate without a clock"
msgstr "Kan niet werken zonder een klok."
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Fout bij het lezen van %d bytes van apparaat '%s'."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Kan geen buffers toekennen in apparaat '%s'."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Kan geen verbinding maken met de geluidsserver"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Kan de eigenschappen van de geluidsserver niet opvragen"
msgstr ""
"Project-Id-Version: gst-plugins-0.8.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2004-09-27 13:32+0530\n"
"Last-Translator: Gora Mohanty <gora_mohanty@yahoo.co.in>\n"
"Language-Team: Oriya <gora_mohanty@yahoo.co.in>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
-msgid "Internal data stream error."
+msgid "Failed to decode JPEG image"
msgstr ""
-msgid "Failed to decode JPEG image"
+msgid "Internal data stream error."
msgstr ""
#, fuzzy
msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
+
+#, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+
+#, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "\"%s\" ଯନ୍ତ୍ର ଗୋଟିଏ ଅନୁଲିପିକାର ନୁହେଁ."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "\"%s\" ଯନ୍ତ୍ର ଗୋଟିଏ ଅନୁଲିପିକାର ନୁହେଁ."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "\"%s\" ଯନ୍ତ୍ର ଗୋଟିଏ ଅନୁଲିପିକାର ନୁହେଁ."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "\"%s\" ଯନ୍ତ୍ର ଗୋଟିଏ ଅନୁଲିପିକାର ନୁହେଁ."
+
+#, fuzzy, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
+
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr ""
+#, fuzzy, c-format
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ପର୍ଯ୍ଯାପ୍ତ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
+
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed after %d tries. device %s. system error: %s"
msgstr ""
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ପର୍ଯ୍ଯାପ୍ତ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "\"%s\" ଯନ୍ତ୍ର ଗୋଟିଏ ଅନୁଲିପିକାର ନୁହେଁ."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "\"%s\" ଧ୍ବନି ଯନ୍ତ୍ର ବନ୍ଦ କରିହେଲା ନାହିଁ."
+
+#, fuzzy, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "\"%s\" ଧ୍ବନି ଯନ୍ତ୍ର ବନ୍ଦ କରିହେଲା ନାହିଁ."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
msgstr ""
#, fuzzy, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "\"%s\" ଧ୍ବନି ଯନ୍ତ୍ର ବନ୍ଦ କରିହେଲା ନାହିଁ."
-
-#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ପର୍ଯ୍ଯାପ୍ତ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
msgid "Failed to set output %d on device %s."
msgstr "\"%s\" ଧ୍ବନି ଯନ୍ତ୍ର ବନ୍ଦ କରିହେଲା ନାହିଁ."
-#, fuzzy, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
-
-#, fuzzy, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ପର୍ଯ୍ଯାପ୍ତ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
msgstr ""
#, fuzzy
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "\"%s\" ଯନ୍ତ୍ର ଗୋଟିଏ ଅନୁଲିପିକାର ନୁହେଁ."
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "\"%s\" ଯନ୍ତ୍ରରୁ ଅସ୍ଥାୟୀ ସଞ୍ଚୟ ସ୍ଥାନ ଆଣିହେଲା ନାହିଁ."
#, fuzzy
#~ msgid "Could not get buffers from device '%s'."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-01-07 21:53+0100\n"
"Last-Translator: Jakub Bogusz <qboosh@pld-linux.org>\n"
"Language-Team: Polish <translation-team-pl@lists.sourceforge.net>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nie udało się nawiązać połączenia z serwerem dźwięku"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Nie udało się odpytać o możliwości serwera dźwięku"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "'%s' autorstwa '%s'"
-msgid "Internal data stream error."
-msgstr "Błąd wewnętrzny strumienia danych."
-
msgid "Failed to decode JPEG image"
msgstr "Nie udało się zdekodować obrazu JPEG"
+msgid "Internal data stream error."
+msgstr "Błąd wewnętrzny strumienia danych."
+
msgid "Could not connect to server"
msgstr "Nie udało się połączyć z serwerem"
msgstr "Błąd odczytu %d bajtów z urządzenia '%s'."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Nie udało się odwzorować buforów z urządzenia '%s'"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Sterownik urządzenia '%s' nie obsługuje żadnej znanej metody przechwytywania."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Sterownik urządzenia '%s' nie obsługuje żadnej znanej metody przechwytywania."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Urządzenie '%s' nie jest urządzeniem wyjściowym."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Urządzenie '%s' nie jest urządzeniem wyjściowym."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Urządzenie '%s' nie jest urządzeniem przechwytującym."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Urządzenie '%s' nie jest urządzeniem przechwytującym."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Nie udało się uzyskać parametrów urządzenia '%s'"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Urządzenie wejściowe obrazu nie przyjęło nowego ustawienia częstotliwości "
+"klatek."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Otrzymano nieoczekiwany rozmiar klatki %u zamiast %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Nie udało się uzyskać klatek obrazu z urządzenia '%s'."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Niepowodzenie po %d próbach. Urządzenie %s. Błąd systemowy: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Nie udało się uzyskać ustawień tunera %d urządzenia '%s'."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Błąd odczytu %d bajtów z urządzenia '%s'."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Urządzenie '%s' nie jest urządzeniem wyjściowym."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Nie udało się ustawić wejścia %d urządzenia %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Nie udało się ustawić wejścia %d urządzenia %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Nie udało się uzyskać siły sygnału dla urządzenia '%s'."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"Nie udało się uzyskać aktualnego wejścia urządzenia '%s'. Może to radio"
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Nie udało się ustawić wejścia %d urządzenia %s."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Nie udało się ustawić wyjścia %d urządzenia %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Nie udało się skolejkować buforów urządzenia '%s'."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Nie udało się uzyskać klatek obrazu z urządzenia '%s'."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Niepowodzenie po %d próbach. Urządzenie %s. Błąd systemowy: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Nie udało się uzyskać parametrów urządzenia '%s'"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Urządzenie wejściowe obrazu nie przyjęło nowego ustawienia częstotliwości "
-"klatek."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Nie udało się odwzorować buforów z urządzenia '%s'"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Sterownik urządzenia '%s' nie obsługuje żadnej znanej metody przechwytywania."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Zmiana rozdzielczości w czasie działania nie jest jeszcze obsługiwana."
msgid "Cannot operate without a clock"
msgstr "Nie można pracować bez zegara"
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Błąd odczytu %d bajtów z urządzenia '%s'."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Nie udało się skolejkować buforów urządzenia '%s'."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nie udało się nawiązać połączenia z serwerem dźwięku"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Nie udało się odpytać o możliwości serwera dźwięku"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-01-08 01:28-0300\n"
"Last-Translator: Fabrício Godoy <skarllot@gmail.com>\n"
"Language-Team: Brazilian Portuguese <ldp-br@bazar.conectiva.com.br>\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n > 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Não foi possível estabelecer uma conexão com servidor de som"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Falha ao examinar os recursos do servidor de som"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "\"%s\" por \"%s\""
-msgid "Internal data stream error."
-msgstr "Erro interno no fluxo de dados."
-
msgid "Failed to decode JPEG image"
msgstr "Falha ao decodificar a imagem JPEG"
+msgid "Internal data stream error."
+msgstr "Erro interno no fluxo de dados."
+
msgid "Could not connect to server"
msgstr "Não foi possível conectar ao servidor"
msgstr "Erro ao ler %d bytes do dispositivo \"%s\"."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Não foi possível mapear buffers do dispositivo \"%s\""
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"O driver do dispositivo \"%s\" não tem suporte a nenhum método conhecido de "
+"captura."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"O driver do dispositivo \"%s\" não tem suporte a nenhum método conhecido de "
+"captura."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "O dispositivo \"%s\" não é um dispositivo de saída."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "O dispositivo \"%s\" não é um dispositivo de saída."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "O dispositivo \"%s\" não é um dispositivo de captura."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "O dispositivo \"%s\" não é um dispositivo de captura."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Não foi possível obter os parâmetros no dispositivo \"%s\""
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"A entrada do dispositivo de vídeo não aceita definir uma nova taxa de "
+"quadros."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Foi obtido um tamanho de quadro inesperado de %u, ao invés de %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Erro ao ler %d bytes no dispositivo \"%s\"."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Falha ao tentar obter os quadros de vídeo do dispositivo \"%s\"."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Falha após %d tentativas. Dispositivo %s. Erro do sistema: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Falha ao obter configurações do sintonizador %d no dispositivo \"%s\"."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Erro ao ler %d bytes do dispositivo \"%s\"."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "O dispositivo \"%s\" não é um dispositivo de saída."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Falha ao definir a entrada %d no dispositivo %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Falha ao definir a entrada %d no dispositivo %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Falha ao obter a força do sinal para o dispositivo \"%s\"."
#, c-format
msgid ""
"dispositivo de rádio"
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Falha ao definir a entrada %d no dispositivo %s."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Falha ao definir a saída %d no dispositivo %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Não possível adicionar buffers à fila no dispositivo \"%s\"."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Falha ao tentar obter os quadros de vídeo do dispositivo \"%s\"."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Falha após %d tentativas. Dispositivo %s. Erro do sistema: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Não foi possível obter os parâmetros no dispositivo \"%s\""
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"A entrada do dispositivo de vídeo não aceita definir uma nova taxa de "
-"quadros."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Não foi possível mapear buffers do dispositivo \"%s\""
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"O driver do dispositivo \"%s\" não tem suporte a nenhum método conhecido de "
-"captura."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Ainda não há suporte a mudança de resolução enquanto está executando."
msgid "Cannot operate without a clock"
msgstr "Não é possível operar sem um temporizador"
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Erro ao ler %d bytes no dispositivo \"%s\"."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Não possível adicionar buffers à fila no dispositivo \"%s\"."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Não foi possível estabelecer uma conexão com servidor de som"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Falha ao examinar os recursos do servidor de som"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.23.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2010-08-16 03:22+0300\n"
"Last-Translator: Lucian Adrian Grijincu <lucian.grijincu@gmail.com>\n"
"Language-Team: Romanian <translation-team-ro@lists.sourceforge.net>\n"
"X-Generator: Virtaal 0.6.1\n"
"X-Launchpad-Export-Date: 2010-08-16 00:08+0000\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nu se poate stabili o conexiune la serverul de sunet"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Interogarea capabilităților serverului de sunet a eșuat"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "„%s” de „%s”"
-msgid "Internal data stream error."
-msgstr "Eroare internă a fluxului de date."
-
msgid "Failed to decode JPEG image"
msgstr "Nu s-a putut decoda imaginea JPEG"
+msgid "Internal data stream error."
+msgstr "Eroare internă a fluxului de date."
+
msgid "Could not connect to server"
msgstr "Nu se poate stabili o conexiune la server"
msgstr "Eroare citire %d octeți de la dispozitivul „%s”."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Nu s-a putut mapa memoria tampon din dispozitivul „%s”"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Driverul pentru dispozitivul „%s” nu suport nici o metodă de captură "
+"cunoscută."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Driverul pentru dispozitivul „%s” nu suport nici o metodă de captură "
+"cunoscută."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Dispozitivul „%s” nu este un dispozitiv de ieșire."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Dispozitivul „%s” nu este un dispozitiv de ieșire."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Dispozitivul „%s” nu este un dispozitiv de captură."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Dispozitivul „%s” nu este un dispozitiv de captură."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Nu s-au putut obține parametrii pentru dispozitivul „%s”"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Dispozitivul de intrare video nu a acceptat noua configurare de rate cadre."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "S-a obținut o dimensiune neașteptată pentru cadru, %u în loc de %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Eroare citire %d octeți pe dispozitivul „%s”."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Obținerea cadrelor video pentru dispozitivul „%s” a eșuat."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "A eșuat după %d încercări. dispozitiv %s. eroare sistem: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr ""
+"A eșuat obținerea configurării receptorului %d pentru dispozitivul „%s”."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Eroare citire %d octeți de la dispozitivul „%s”."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Dispozitivul „%s” nu este un dispozitiv de ieșire."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Definirea valorii %d pentru dispozitivul „%s” a eșuat."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Definirea valorii %d pentru dispozitivul „%s” a eșuat."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Obținerea nivelului semnalului pentru dispozitivul „%s” a eșuat."
#, c-format
msgid ""
"Obținerea intrări curente pentru dispozitivul „%s” a eșuat. Posibil să fie "
"un dispozitiv radio."
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Definirea valorii %d pentru dispozitivul „%s” a eșuat."
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "Definirea valorii %d pentru dispozitivul „%s” a eșuat."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Nu s-a putut programa memoria tampon în dispozitivul „%s”."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Obținerea cadrelor video pentru dispozitivul „%s” a eșuat."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "A eșuat după %d încercări. dispozitiv %s. eroare sistem: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Nu s-au putut obține parametrii pentru dispozitivul „%s”"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Dispozitivul de intrare video nu a acceptat noua configurare de rate cadre."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Nu s-a putut mapa memoria tampon din dispozitivul „%s”"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Driverul pentru dispozitivul „%s” nu suport nici o metodă de captură "
-"cunoscută."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Schimbarea rezoluției pe durata rulării nu este incă suportată."
msgid "Cannot operate without a clock"
msgstr "Nu se poate opera fără un ceas"
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Eroare citire %d octeți pe dispozitivul „%s”."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Nu s-a putut programa memoria tampon în dispozitivul „%s”."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nu se poate stabili o conexiune la serverul de sunet"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Interogarea capabilităților serverului de sunet a eșuat"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-04-26 21:13+0400\n"
"Last-Translator: Yuri Kozlov <yuray@komyakino.ru>\n"
"Language-Team: Russian <gnu@mx.ru>\n"
"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Не удалось установить соединение с сервером звука"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Ошибка при запросе возможностей сервера звука"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "«%s» исполняет «%s»"
-msgid "Internal data stream error."
-msgstr "Внутренняя ошибка потока данных."
-
msgid "Failed to decode JPEG image"
msgstr "Не удалось декодировать JPEG-изображение"
+msgid "Internal data stream error."
+msgstr "Внутренняя ошибка потока данных."
+
msgid "Could not connect to server"
msgstr "Не удалось соединиться с сервером"
msgstr "Ошибка чтения %d байт из устройства «%s»."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+"Не удалось создать список видео-форматов, с которыми может работать "
+"устройство «%s»"
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Не удалось распределить буферы устройства «%s»"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Драйвер устройства «%s» не поддерживает ни один из известных методов захвата."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Драйвер устройства «%s» не поддерживает ни один из известных методов захвата."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Устройство «%s» не является устройством вывода."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Устройство «%s» не является устройством вывода."
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Устройство «%s» не может осуществлять захват в разрешении %dx%d"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Устройство «%s» не может осуществлять захват в указанном формате"
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Не удалось получить параметры устройства «%s»"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "Устройство видео-захвата не приняло новый параметр кадровой частоты."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Получен неожиданный размер кадра: %u вместо %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Ошибка при попытке получения кадров видео с устройства «%s»."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Ошибка после %d попыток. Устройство: %s. Системная ошибка: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Ошибка получения установок тюнера %d устройства «%s»."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Ошибка чтения %d байт из устройства «%s»."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Устройство «%s» не является устройством вывода."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Не удалось выбрать вход %d для устройства %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Не удалось выбрать вход %d для устройства %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Ошибка получения мощности сигнала для устройства «%s»."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"устройство"
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Не удалось выбрать вход %d для устройства %s."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Не удалось выбрать выход %d для устройства %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Не удалось добавить в очередь буферы устройства «%s»"
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Ошибка при попытке получения кадров видео с устройства «%s»."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Ошибка после %d попыток. Устройство: %s. Системная ошибка: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Не удалось получить параметры устройства «%s»"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Устройство видео-захвата не приняло новый параметр кадровой частоты."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Не удалось распределить буферы устройства «%s»"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Драйвер устройства «%s» не поддерживает ни один из известных методов захвата."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
"Изменение разрешения видео во время выполнения не поддерживается в настоящий "
msgid "Cannot operate without a clock"
msgstr "Операция невозможна без часов"
-#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
-#~ msgstr ""
-#~ "Не удалось создать список видео-форматов, с которыми может работать "
-#~ "устройство «%s»"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Ошибка чтения %d байт из устройства «%s»."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Не удалось добавить в очередь буферы устройства «%s»"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Не удалось установить соединение с сервером звука"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Ошибка при запросе возможностей сервера звука"
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ msgid "No free buffers found in the pool at index %d."
#~ msgstr "В пуле не найдено свободных буферов для индекса %d."
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "Устройство «%s» не может осуществлять захват в разрешении %dx%d"
-
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "Устройство «%s» не может осуществлять захват в указанном формате"
-
#~ msgid "Could not get buffers from device '%s'."
#~ msgstr "Не удалось получить буферы устройства «%s»"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2010-11-08 15:48+0100\n"
"Last-Translator: Peter Tuhársky <tuharsky@misbb.sk>\n"
"Language-Team: Slovak <sk-i18n@lists.linux.sk>\n"
"X-Generator: KBabel 1.11.4\n"
"X-Poedit-Country: SLOVAKIA\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nepodarilo sa nadviazať spojenie so zvukovým serverom"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Nepodarilo sa zistiť schopnosti zvukového servera"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "'%s' od '%s'"
-msgid "Internal data stream error."
-msgstr "Vnútorná chyba prúdu údajov."
-
msgid "Failed to decode JPEG image"
msgstr "Nepodarilo sa dekódovať obrázok JPEG"
+msgid "Internal data stream error."
+msgstr "Vnútorná chyba prúdu údajov."
+
msgid "Could not connect to server"
msgstr "Nepodarilo sa pripojiť k serveru"
msgstr "Chyba pri čítaní %d bajtov na zariadení '%s'."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+"Nepodarilo sa zistiť možné video formáty, '%s' s ktorými vie zariadenie "
+"pracovať"
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Nepodarilo sa namapovať vyrovnávaciu pamäť zariadenia '%s'"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Tento ovládač zariadenia '%s' nepodporuje žiadnu známu metódu nahrávania."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Tento ovládač zariadenia '%s' nepodporuje žiadnu známu metódu nahrávania."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Zariadenie '%s' nie je určené pre výstup."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Zariadenie '%s' nie je určené pre výstup."
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Zariadenie '%s' nevie zachytávať na %dx%d"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Zariadenie '%s' nedokáže zachytávať v uvedenom formáte"
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Nepodarilo sa získať parametre zariadenia '%s'"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Zariadenie video vstupu neakceptovalo nové nastavenie frekvencie snímok."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Nastala neočakávaná veľkosť snímky %u namiesto %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Nepodarilo sa získať videosnímky zo zariadenia '%s'."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Nepodarilo sa po %d pokusoch. Zariadenie %s. Systémová chyba: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Nepodarilo sa získať nastavenie prijímača %d od zariadenia '%s'."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
msgstr "Chyba pri čítaní %d bajtov na zariadení '%s'."
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Zariadenie '%s' nie je určené pre výstup."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Nepodarilo sa nastaviť vstup %d na zariadení %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Nepodarilo sa nastaviť vstup %d na zariadení %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Nepodarilo sa zistiť silu signálu pre zariadenie '%s'."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
msgstr ""
"Nepodarilo sa zistiť súčasný vstup na zariadení '%s'. Možno je to rádio."
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Nepodarilo sa nastaviť vstup %d na zariadení %s."
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "Nepodarilo sa nastaviť vstup %d na zariadení %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Nepodarilo sa zaradiť vyrovnávaciu pamäť na zariadení '%s'."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Nepodarilo sa získať videosnímky zo zariadenia '%s'."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Nepodarilo sa po %d pokusoch. Zariadenie %s. Systémová chyba: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Nepodarilo sa získať parametre zariadenia '%s'"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Zariadenie video vstupu neakceptovalo nové nastavenie frekvencie snímok."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Nepodarilo sa namapovať vyrovnávaciu pamäť zariadenia '%s'"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Tento ovládač zariadenia '%s' nepodporuje žiadnu známu metódu nahrávania."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Zmena rozlíšenia za chodu nie je zatiaľ podporovaná."
msgid "Cannot operate without a clock"
msgstr "Nemôžem fungovať bez hodín"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Chyba pri čítaní %d bajtov na zariadení '%s'."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Nepodarilo sa zaradiť vyrovnávaciu pamäť na zariadení '%s'."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nepodarilo sa nadviazať spojenie so zvukovým serverom"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Nepodarilo sa zistiť schopnosti zvukového servera"
+
#~ msgid "Failed getting controls attributes on device '%s.'"
#~ msgstr "Nepodarilo sa získať atribúty ovládacích prvkov na zariadení '%s.'"
-#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
-#~ msgstr ""
-#~ "Nepodarilo sa zistiť možné video formáty, '%s' s ktorými vie zariadenie "
-#~ "pracovať"
-
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ "buffers have been allocated yet, or the userptr or length are invalid. "
#~ msgstr ""
#~ "Nenašli sa žiadne voľné oblasti vyrovnávacei pamäte v bloku na indexe %d."
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "Zariadenie '%s' nevie zachytávať na %dx%d"
-
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "Zariadenie '%s' nedokáže zachytávať v uvedenom formáte"
-
#~ msgid "Could not get buffers from device '%s'."
#~ msgstr "Nepodarilo sa získať vyrovnávaciu pamäť od zariadenia '%s'."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-03-12 18:35+0100\n"
"Last-Translator: Klemen Košir <klemen.kosir@gmx.com>\n"
"Language-Team: Slovenian <translation-team-sl@lists.sourceforge.net>\n"
"X-Poedit-Country: SLOVENIA\n"
"X-Poedit-SourceCharset: utf-8\n"
-msgid "Could not establish connection to sound server"
-msgstr "Povezave z zvočnim strežnikom ni mogoče vzpostaviti"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Poizvedba o zmogljivosti zvočnega strežnika je spodletela"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "'%s' - '%s'"
-msgid "Internal data stream error."
-msgstr "Notranja napaka pretoka podatkov."
-
msgid "Failed to decode JPEG image"
msgstr "Odkodiranje slike JPEG je spodletelo"
+msgid "Internal data stream error."
+msgstr "Notranja napaka pretoka podatkov."
+
msgid "Could not connect to server"
msgstr "S strežnikom se ni mogoče povezati"
msgstr "Napaka med branjem %d bajtov iz naprave '%s'."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Medpomnilnika naprave '%s' ni mogoče preslikati"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "Gonilnik naprave '%s' ne podpira nobenega znanega načina zajemanja."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "Gonilnik naprave '%s' ne podpira nobenega znanega načina zajemanja."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Naprava '%s' ni izhodna naprava."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Naprava '%s' ni izhodna naprava."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Naprava '%s' ni naprava za zajemanje."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Naprava '%s' ni naprava za zajemanje."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Parametrov na napravi '%s' ni mogoče pridobiti."
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Vhodni video napravi ni mogoče določiti novih nastavitev hitrosti sličic."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr ""
"Nepričakovana vrednost velikosti okvirja (%u). Pričakovana vrednost je %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Napaka med branjem %d bajtov na napravi '%s'."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Napaka med poskusom pridobivanja video sličic z naprave '%s'."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr ""
+"Neuspešen zagon po %d poskusih na napravi %s. Sporočilo sistemske napake: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Napaka med pridobivanjem nastavitev uglaševalnika %d naprave '%s'."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Napaka med branjem %d bajtov iz naprave '%s'."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Naprava '%s' ni izhodna naprava."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Napaka med nastavljanjem vhoda %d na napravi %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Napaka med nastavljanjem vhoda %d na napravi %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Napaka med pridobivanjem moči signala za napravo '%s'."
#, c-format
msgid ""
"naprava."
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Napaka med nastavljanjem vhoda %d na napravi %s."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Napaka med nastavljanjem izhoda %d na napravi %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Medpomnilnika na napravi '%s' ni mogoče uvrstiti."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Napaka med poskusom pridobivanja video sličic z naprave '%s'."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr ""
-"Neuspešen zagon po %d poskusih na napravi %s. Sporočilo sistemske napake: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Parametrov na napravi '%s' ni mogoče pridobiti."
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Vhodni video napravi ni mogoče določiti novih nastavitev hitrosti sličic."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Medpomnilnika naprave '%s' ni mogoče preslikati"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "Gonilnik naprave '%s' ne podpira nobenega znanega načina zajemanja."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Spreminjanje ločljivosti med delovanjem še ni podprto."
msgid "Cannot operate without a clock"
msgstr "Delovanje brez ure ni mogoče"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Napaka med branjem %d bajtov na napravi '%s'."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Medpomnilnika na napravi '%s' ni mogoče uvrstiti."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Povezave z zvočnim strežnikom ni mogoče vzpostaviti"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Poizvedba o zmogljivosti zvočnega strežnika je spodletela"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Opisuje izbran vnosni predmet."
msgstr ""
"Project-Id-Version: gst-plugins 0.8.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2004-08-07 20:29+0200\n"
"Last-Translator: Laurent Dhima <laurenti@alblinux.net>\n"
"Language-Team: Albanian <begraj@hotmail.com>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
-msgid "Internal data stream error."
+msgid "Failed to decode JPEG image"
msgstr ""
-msgid "Failed to decode JPEG image"
+msgid "Internal data stream error."
msgstr ""
#, fuzzy
msgstr "E pamundur marrja e buffers nga dispozitivi \"%s\"."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "E pamundur marrja e buffers nga dispozitivi \"%s\"."
+
+#, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+
+#, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Dispozitivi \"%s\" nuk është një dispozitiv marrje."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Dispozitivi \"%s\" nuk është një dispozitiv marrje."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Dispozitivi \"%s\" nuk është një dispozitiv marrje."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Dispozitivi \"%s\" nuk është një dispozitiv marrje."
+
+#, fuzzy, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "E pamundur marrja e buffers nga dispozitivi \"%s\"."
+
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr ""
+#, fuzzy, c-format
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "E pamundur marrja e buffers të mjaftueshëm nga dispozitivi \"%s\"."
+
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Failed after %d tries. device %s. system error: %s"
msgstr ""
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "E pamundur marrja e buffers të mjaftueshëm nga dispozitivi \"%s\"."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "E pamundur marrja e buffers nga dispozitivi \"%s\"."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Dispozitivi \"%s\" nuk është një dispozitiv marrje."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "E pamundur mbyllja e dispozitivit audio \"%s\"."
+
+#, fuzzy, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "E pamundur mbyllja e dispozitivit audio \"%s\"."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "E pamundur marrja e buffers nga dispozitivi \"%s\"."
+
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
msgstr ""
#, fuzzy, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "E pamundur mbyllja e dispozitivit audio \"%s\"."
-
-#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr "E pamundur marrja e buffers të mjaftueshëm nga dispozitivi \"%s\"."
msgid "Failed to set output %d on device %s."
msgstr "E pamundur mbyllja e dispozitivit audio \"%s\"."
-#, fuzzy, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "E pamundur marrja e buffers nga dispozitivi \"%s\"."
-
-#, fuzzy, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "E pamundur marrja e buffers të mjaftueshëm nga dispozitivi \"%s\"."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "E pamundur marrja e buffers nga dispozitivi \"%s\"."
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-
-#, fuzzy, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "E pamundur marrja e buffers nga dispozitivi \"%s\"."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
msgid "Cannot operate without a clock"
msgstr ""
+#, fuzzy
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "E pamundur marrja e buffers nga dispozitivi \"%s\"."
+
#~ msgid ""
#~ "No usable colorspace element could be found.\n"
#~ "Please install one and restart."
# Serbian translation of gst-plugins
# Copyright (C) 2004 Free Software Foundation, Inc.
-# This file is distributed under the same license as the gst-plugins-good package.
# Danilo Segan <dsegan@gmx.net>, 2004.
-# Мирослав Николић <miroslavnikolic@rocketmail.com>, 2011.
+#
msgid ""
msgstr ""
-"Project-Id-Version: gst-plugins-good-0.10.28.2\n"
+"Project-Id-Version: gst-plugins 0.7.6\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-12-10 02:18+0000\n"
-"PO-Revision-Date: 2011-12-04 16:44+0200\n"
-"Last-Translator: Мирослав Николић <miroslavnikolic@rocketmail.com>\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
+"PO-Revision-Date: 2004-03-13 00:18+0100\n"
+"Last-Translator: Danilo Segan <dsegan@gmx.net>\n"
"Language-Team: Serbian <gnu@prevod.org>\n"
"Language: sr\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
+"Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : (n%10>=2 && n"
"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
-"X-Generator: Virtaal 0.7.0\n"
-"X-Project-Style: gnome\n"
-
-msgid "Could not establish connection to sound server"
-msgstr "Не могу да успоставим везу са сервером звука"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Нисам успео да испитам могућности сервера звука"
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
-msgstr "„%s“ изводи „%s“"
-
-msgid "Internal data stream error."
-msgstr "Унутрашња грешка тока података."
+msgstr ""
msgid "Failed to decode JPEG image"
-msgstr "Нисам успео да декодирам ЈПЕГ слику"
+msgstr ""
+
+msgid "Internal data stream error."
+msgstr ""
+#, fuzzy
msgid "Could not connect to server"
-msgstr "Не могу да се повежем са сервером"
+msgstr "Не могу да затворим управљачки уређај „%s“."
msgid "Server does not support seeking."
-msgstr "Сервер не подржава позиционирање."
+msgstr ""
+#, fuzzy
msgid "Could not resolve server name."
-msgstr "Не могу да решим назив сервера."
+msgstr "Не могу да затворим управљачки уређај „%s“."
+#, fuzzy
msgid "Could not establish connection to server."
-msgstr "Не могу да успоставим везу са сервером."
+msgstr "Не могу да затворим управљачки уређај „%s“."
msgid "Secure connection setup failed."
-msgstr "Подешавање безбедне везе није успело."
+msgstr ""
msgid ""
"A network error occured, or the server closed the connection unexpectedly."
-msgstr "Дошло је до грешке на мрежи, или је сервер неочекивано затворио везу."
+msgstr ""
msgid "Server sent bad data."
-msgstr "Сервер је послао лоше податке."
+msgstr ""
msgid "No URL set."
-msgstr "Није подешена адреса."
+msgstr ""
msgid "No or invalid input audio, AVI stream will be corrupt."
-msgstr "Улазног звÑ\83ка нема или Ñ\98е неиÑ\81пÑ\80аван, Ð\90Ð\92Ð\98 Ñ\82ок Ñ\9bе биÑ\82и оÑ\88Ñ\82еÑ\9bен."
+msgstr "Улазног звÑ\83ка нема или Ñ\98е неиÑ\81пÑ\80аван, Ð\90Ð\92Ð\98 Ñ\82ок Ñ\9bе биÑ\82и иÑ\81кваÑ\80ен."
msgid "This file contains no playable streams."
-msgstr "Ова датотека не садржи токове за пуштање."
+msgstr ""
msgid "This file is invalid and cannot be played."
-msgstr "Ова датотека је неисправна и не може бити пуштена."
+msgstr ""
msgid "This file is corrupt and cannot be played."
-msgstr "Ова датотека је оштећена и не може бити пуштена."
+msgstr ""
msgid "Invalid atom size."
-msgstr "Неисправна величина атома."
+msgstr ""
msgid "This file is incomplete and cannot be played."
-msgstr "Ова датотека је непотпуна и не може бити пуштена."
+msgstr ""
msgid "The video in this file might not play correctly."
-msgstr "Видео у овој датотеци можда неће бити пуштен исправно."
+msgstr ""
#, c-format
msgid "This file contains too many streams. Only playing first %d"
-msgstr "Ова датотека садржи превише токова. Пуштам само први %d"
+msgstr ""
msgid ""
"No supported stream was found. You might need to install a GStreamer RTSP "
"extension plugin for Real media streams."
msgstr ""
-"Није пронађен ниједан подржани ток. Можда ћете морати да инсталирате "
-"прикључак РТСП проширења Гстримера за токове Стварног медија."
msgid ""
"No supported stream was found. You might need to allow more transport "
"protocols or may otherwise be missing the right GStreamer RTSP extension "
"plugin."
msgstr ""
-"Није пронађен ниједан подржани ток. Можда ћете морати да омогућите више "
-"протокола преноса или можда на неки други начин недостаје прави прикључак "
-"РТСП проширења ГСтримера."
msgid "Internal data flow error."
-msgstr "Унутрaшња грешка протока података."
+msgstr ""
msgid "Volume"
msgstr "Јачина звука"
msgstr "Бас"
msgid "Treble"
-msgstr "Ð\92иÑ\81окоÑ\82онаÑ\86"
+msgstr "ШÑ\83м"
msgid "Synth"
-msgstr "Синтисајзер"
+msgstr "Синт."
msgid "PCM"
msgstr "ПЦМ"
msgstr "Звучник"
msgid "Line-in"
-msgstr "Ð\9bиниÑ\98Ñ\81ки Ñ\83лаз"
+msgstr "Ул.лин."
msgid "Microphone"
msgstr "Микрофон"
msgstr "ЦД"
msgid "Mixer"
-msgstr "Ð\9cеÑ\88аÑ\87"
+msgstr "Ð\9cикÑ\81еÑ\80"
msgid "PCM-2"
msgstr "ПЦМ-2"
msgstr "Снимање"
msgid "In-gain"
-msgstr "Улазно појачање"
+msgstr "Ул. пој."
msgid "Out-gain"
-msgstr "Излазно појачање"
+msgstr "Из. пој."
msgid "Line-1"
-msgstr "Линија 1"
+msgstr "Лин. 1"
msgid "Line-2"
-msgstr "Линија 2"
+msgstr "Лин. 2"
msgid "Line-3"
-msgstr "Линија 3"
+msgstr "Лин. 3"
msgid "Digital-1"
-msgstr "Дигитални 1"
+msgstr "Диг. 1"
msgid "Digital-2"
-msgstr "Дигитални 2"
+msgstr "Диг. 2"
msgid "Digital-3"
-msgstr "Дигитални 3"
+msgstr "Диг. 3"
msgid "Phone-in"
-msgstr "Телефонски улаз"
+msgstr "Тел. ул."
msgid "Phone-out"
-msgstr "Телефонски излаз"
+msgstr "Тел. из."
msgid "Video"
msgstr "Видео"
"Could not open audio device for playback. Device is being used by another "
"application."
msgstr ""
-"Не могу да покренем звучни уређај ради пуштања. Уређај користи нека друга "
-"апликација."
msgid ""
"Could not open audio device for playback. You don't have permission to open "
"the device."
msgstr ""
-"Не могу да покренем аудио уређај ради пуштања. Немате овлашћење за покретање "
-"уређаја."
+#, fuzzy
msgid "Could not open audio device for playback."
-msgstr "Ð\9dе могÑ\83 да покÑ\80енем звÑ\83Ñ\87ни Ñ\83Ñ\80еÑ\92аÑ\98 Ñ\80ади пÑ\83Ñ\88Ñ\82аÑ\9aа."
+msgstr "Ð\9dе могÑ\83 да оÑ\82воÑ\80им звÑ\83Ñ\87ни Ñ\83Ñ\80еÑ\92аÑ\98 â\80\9e%sâ\80\9c Ñ\80ади Ñ\83пиÑ\81а."
msgid ""
"Could not open audio device for recording. You don't have permission to open "
"the device."
msgstr ""
-"Не могу да покренем аудио уређај ради снимања. Немате овлашћење за покретање "
-"уређаја."
+#, fuzzy
msgid "Could not open audio device for recording."
-msgstr "Ð\9dе могÑ\83 да покÑ\80енем звÑ\83Ñ\87ни Ñ\83Ñ\80еÑ\92аÑ\98 Ñ\80ади Ñ\81нимања."
+msgstr "Ð\9dе могÑ\83 да оÑ\82воÑ\80им ЦÐ\94 Ñ\83Ñ\80еÑ\92аÑ\98 Ñ\80ади Ñ\87иÑ\82ања."
+#, fuzzy
msgid "Could not open audio device for mixer control handling."
-msgstr "Ð\9dе могÑ\83 да покÑ\80енем звÑ\83Ñ\87ни Ñ\83Ñ\80еÑ\92аÑ\98 за Ñ\80Ñ\83коваÑ\9aе Ñ\83пÑ\80авÑ\99аÑ\9aем меÑ\88аÑ\87а."
+msgstr "Ð\9dе могÑ\83 да оÑ\82воÑ\80им звÑ\83Ñ\87ни Ñ\83Ñ\80еÑ\92аÑ\98 â\80\9e%sâ\80\9c Ñ\80ади Ñ\83пиÑ\81а."
msgid ""
"Could not open audio device for mixer control handling. This version of the "
"Open Sound System is not supported by this element."
msgstr ""
-"Не могу да покренем звучни уређај за руковање управљањем мешача. Ово издање "
-"система отвореног звука није подржано овим елементом."
msgid "Master"
-msgstr "Главни"
+msgstr ""
msgid "Front"
-msgstr "Предњи"
+msgstr ""
+#, fuzzy
msgid "Rear"
-msgstr "Ð\97адÑ\9aи"
+msgstr "СнимаÑ\9aе"
msgid "Headphones"
-msgstr "Слушалице"
+msgstr ""
msgid "Center"
-msgstr "Средињи"
+msgstr ""
msgid "LFE"
-msgstr "ЛФЕ"
+msgstr ""
msgid "Surround"
-msgstr "Окружење"
+msgstr ""
+#, fuzzy
msgid "Side"
-msgstr "Са Ñ\81Ñ\82Ñ\80ане"
+msgstr "Ð\92идео"
+#, fuzzy
msgid "Built-in Speaker"
-msgstr "УгÑ\80аÑ\92ени звучник"
+msgstr "Ð\97вучник"
msgid "AUX 1 Out"
-msgstr "АУХ 1 излаз"
+msgstr ""
msgid "AUX 2 Out"
-msgstr "АУХ 2 излаз"
+msgstr ""
msgid "AUX Out"
-msgstr "АУХ излаз"
+msgstr ""
msgid "3D Depth"
-msgstr "3Д дубина"
+msgstr ""
msgid "3D Center"
-msgstr "3Д средиште"
+msgstr ""
msgid "3D Enhance"
-msgstr "3Д побољшање"
+msgstr ""
msgid "Telephone"
-msgstr "Телефон"
+msgstr ""
msgid "Line Out"
-msgstr "Линијски излаз"
+msgstr ""
+#, fuzzy
msgid "Line In"
-msgstr "Ð\9bиниÑ\98Ñ\81ки Ñ\83лаз"
+msgstr "Ул.лин."
msgid "Internal CD"
-msgstr "Унутрашњи ЦД"
+msgstr ""
+#, fuzzy
msgid "Video In"
-msgstr "Видео улаз"
+msgstr "Видео"
msgid "AUX 1 In"
-msgstr "АУХ 1 улаз"
+msgstr ""
msgid "AUX 2 In"
-msgstr "АУХ 2 улаз"
+msgstr ""
msgid "AUX In"
-msgstr "АУХ улаз"
+msgstr ""
+#, fuzzy
msgid "Record Gain"
-msgstr "Ð\9fоÑ\98аÑ\87аÑ\9aе Ñ\81нимаÑ\9aа"
+msgstr "СнимаÑ\9aе"
+#, fuzzy
msgid "Output Gain"
-msgstr "Ð\9fоÑ\98аÑ\87аÑ\9aе излаза"
+msgstr "Ð\98з. поÑ\98."
+#, fuzzy
msgid "Microphone Boost"
-msgstr "Ð\9fоÑ\98аÑ\87аÑ\9aе микÑ\80оÑ\84она"
+msgstr "Ð\9cикÑ\80оÑ\84он"
msgid "Loopback"
-msgstr "Повратна петља"
+msgstr ""
msgid "Diagnostic"
-msgstr "Дијагностика"
+msgstr ""
msgid "Bass Boost"
-msgstr "Појачање баса"
+msgstr ""
msgid "Playback Ports"
-msgstr "Портови пуштања"
+msgstr ""
msgid "Input"
-msgstr "Улаз"
+msgstr ""
+#, fuzzy
msgid "Record Source"
-msgstr "Ð\98звоÑ\80 Ñ\81нимаÑ\9aа"
+msgstr "СнимаÑ\9aе"
+#, fuzzy
msgid "Monitor Source"
-msgstr "Ð\98звоÑ\80 пÑ\80аÑ\9bеÑ\9aа"
+msgstr "Ð\9fÑ\80аÑ\9bеÑ\9aе"
msgid "Keyboard Beep"
-msgstr "Звук тастатуре"
+msgstr ""
msgid "Simulate Stereo"
-msgstr "Лажни стерео"
+msgstr ""
msgid "Stereo"
-msgstr "Стерео"
+msgstr ""
msgid "Surround Sound"
-msgstr "Звук окружења"
+msgstr ""
+#, fuzzy
msgid "Microphone Gain"
-msgstr "Ð\9fоÑ\98аÑ\87аÑ\9aе микÑ\80оÑ\84она"
+msgstr "Ð\9cикÑ\80оÑ\84он"
+#, fuzzy
msgid "Speaker Source"
-msgstr "Ð\98звоÑ\80 звÑ\83Ñ\87ника"
+msgstr "Ð\97вÑ\83Ñ\87ник"
+#, fuzzy
msgid "Microphone Source"
-msgstr "Ð\98звоÑ\80 микÑ\80оÑ\84она"
+msgstr "Ð\9cикÑ\80оÑ\84он"
msgid "Jack"
-msgstr "Утичница"
+msgstr ""
msgid "Center / LFE"
-msgstr "Средиште / ЛФЕ"
+msgstr ""
msgid "Stereo Mix"
-msgstr "Стерео микс"
+msgstr ""
msgid "Mono Mix"
-msgstr "Моно микс"
+msgstr ""
msgid "Input Mix"
-msgstr "Улазни микс"
+msgstr ""
msgid "SPDIF In"
-msgstr "СПДИФ улаз"
+msgstr ""
msgid "SPDIF Out"
-msgstr "СПДИФ излаз"
+msgstr ""
+#, fuzzy
msgid "Microphone 1"
-msgstr "Микрофон 1"
+msgstr "Микрофон"
+#, fuzzy
msgid "Microphone 2"
-msgstr "Микрофон 2"
+msgstr "Микрофон"
+#, fuzzy
msgid "Digital Out"
-msgstr "Дигитални излаз"
+msgstr "Диг. 1"
+#, fuzzy
msgid "Digital In"
-msgstr "Дигитални улаз"
+msgstr "Диг. 1"
msgid "HDMI"
-msgstr "ХДМИ"
+msgstr ""
msgid "Modem"
-msgstr "Модем"
+msgstr ""
msgid "Handset"
-msgstr "Слушалица"
+msgstr ""
msgid "Other"
-msgstr "Остало"
+msgstr ""
msgid "None"
-msgstr "Ништа"
+msgstr ""
msgid "On"
-msgstr "Укљ."
+msgstr ""
msgid "Off"
-msgstr "Искљ."
+msgstr ""
msgid "Mute"
-msgstr "Без звука"
+msgstr ""
msgid "Fast"
-msgstr "Брзо"
+msgstr ""
#. TRANSLATORS: "Very Low" is a quality setting here
msgid "Very Low"
-msgstr "Врло низак"
+msgstr ""
#. TRANSLATORS: "Low" is a quality setting here
msgid "Low"
-msgstr "Низак"
+msgstr ""
#. TRANSLATORS: "Medium" is a quality setting here
msgid "Medium"
-msgstr "Средњи"
+msgstr ""
#. TRANSLATORS: "High" is a quality setting here
msgid "High"
-msgstr "Висок"
+msgstr ""
#. TRANSLATORS: "Very High" is a quality setting here
msgid "Very High"
-msgstr "Врло висок"
+msgstr ""
#. TRANSLATORS: "Production" is a quality setting here
msgid "Production"
-msgstr "Производни"
+msgstr ""
+#, fuzzy
msgid "Front Panel Microphone"
-msgstr "Микрофон предње површи"
+msgstr "Микрофон"
msgid "Front Panel Line In"
-msgstr "Линијски улаз предње површи"
+msgstr ""
msgid "Front Panel Headphones"
-msgstr "Слушалице предње површи"
+msgstr ""
msgid "Front Panel Line Out"
-msgstr "Линијски излаз предње површи"
+msgstr ""
msgid "Green Connector"
-msgstr "Зелени прикључак"
+msgstr ""
msgid "Pink Connector"
-msgstr "Ружичаст прикључак"
+msgstr ""
msgid "Blue Connector"
-msgstr "Плави прикључак"
+msgstr ""
msgid "White Connector"
-msgstr "Бели прикључак"
+msgstr ""
msgid "Black Connector"
-msgstr "Црни прикључак"
+msgstr ""
msgid "Gray Connector"
-msgstr "Сиви прикључак"
+msgstr ""
msgid "Orange Connector"
-msgstr "Наранџасти прикључак"
+msgstr ""
msgid "Red Connector"
-msgstr "Црвени прикључак"
+msgstr ""
msgid "Yellow Connector"
-msgstr "Жути прикључак"
+msgstr ""
msgid "Green Front Panel Connector"
-msgstr "Зелени прикључак на предњој површи"
+msgstr ""
msgid "Pink Front Panel Connector"
-msgstr "Ружичасти прикључак на предњој површи"
+msgstr ""
msgid "Blue Front Panel Connector"
-msgstr "Плави прикључак на предњој површи"
+msgstr ""
msgid "White Front Panel Connector"
-msgstr "Бели прикључак на предњој површи"
+msgstr ""
msgid "Black Front Panel Connector"
-msgstr "Црни прикључак на предњој површи"
+msgstr ""
msgid "Gray Front Panel Connector"
-msgstr "Сиви прикључак на предњој површи"
+msgstr ""
msgid "Orange Front Panel Connector"
-msgstr "Наранџасти прикључак на предњој површи"
+msgstr ""
msgid "Red Front Panel Connector"
-msgstr "Црвени прикључак на предњој површи"
+msgstr ""
msgid "Yellow Front Panel Connector"
-msgstr "Жути прикључак на предњој површи"
+msgstr ""
msgid "Spread Output"
-msgstr "Излаз ширења"
+msgstr ""
msgid "Downmix"
-msgstr "Сабирни мешач"
+msgstr ""
msgid "Virtual Mixer Input"
-msgstr "Улаз виртуелног мешача"
+msgstr ""
msgid "Virtual Mixer Output"
-msgstr "Излаз виртуелног мешача"
+msgstr ""
msgid "Virtual Mixer Channels"
-msgstr "Канали виртуелног мешача"
+msgstr ""
#. TRANSLATORS: name + number of a volume mixer control
#, c-format
msgid "%s %d Function"
-msgstr "%s %d функција"
+msgstr ""
#. TRANSLATORS: name of a volume mixer control
#, c-format
msgid "%s Function"
-msgstr "%s функција"
+msgstr ""
msgid ""
"Could not open audio device for playback. This version of the Open Sound "
"System is not supported by this element."
msgstr ""
-"Не могу да покренем звучни уређај ради пуштања. Ово издање система отвореног "
-"звука није подржано овим елементом."
msgid "Playback is not supported by this audio device."
-msgstr "Пуштање није подржано од стране овог звучног уређаја."
+msgstr ""
msgid "Audio playback error."
-msgstr "Грешка приликом пуштања звука."
+msgstr ""
msgid "Recording is not supported by this audio device."
-msgstr "Снимање није подржано од стране овог звучног уређаја."
+msgstr ""
msgid "Error recording from audio device."
-msgstr "Грешка приликом снимања са звучног уређаја."
+msgstr ""
msgid "Gain"
-msgstr "Појачање"
+msgstr ""
msgid "Headphone"
-msgstr "Слушалице"
+msgstr ""
-#, c-format
+#, fuzzy, c-format
msgid "Error reading %d bytes from device '%s'."
-msgstr "Грешка приликом читања %d бајтова са уређаја „%s“."
+msgstr "Не могу да примим бафере са уређаја „%s“."
+
+#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Не могу да примим бафере са уређаја „%s“."
+
+#, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+
+#, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Уређај „%s“ не представља уређај за снимање."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Уређај „%s“ не представља уређај за снимање."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Уређај „%s“ не представља уређај за снимање."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Уређај „%s“ не представља уређај за снимање."
+
+#, fuzzy, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Не могу да примим бафере са уређаја „%s“."
+
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+
+msgid "Video device could not create buffer pool."
+msgstr ""
#, c-format
msgid "Got unexpected frame size of %u instead of %u."
-msgstr "Добих неочекивану величину кадра, %u уместо %u."
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Не могу да примим довољно бафера са уређаја „%s“."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Грешка приликом читања %d бајтова на уређају „%s“."
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr ""
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Не могу да примим довољно бафера са уређаја „%s“."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Не могу да примим бафере са уређаја „%s“."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Уређај „%s“ не представља уређај за снимање."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Не могу да отворим радио уређај '%s'"
+
+#, fuzzy, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Не могу да отворим радио уређај '%s'"
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Не могу да примим бафере са уређаја „%s“."
#, c-format
msgid ""
"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
"it is a v4l1 driver."
msgstr ""
-"Грешка приликом добављања могућности за уређај „%s“: Није в4л2 управљачки "
-"програм. Проверите да ли је то в4л1 управљачки програм."
#, c-format
msgid "Failed to query attributes of input %d in device %s"
-msgstr "Нисам успео да пропитам својства уноса %d у уређају %s"
+msgstr ""
-#, c-format
+#, fuzzy, c-format
msgid "Failed to get setting of tuner %d on device '%s'."
-msgstr "Ð\9dиÑ\81ам Ñ\83Ñ\81пео да добавим подеÑ\88аваÑ\9aа Ñ\82Ñ\98Ñ\83неÑ\80а %d на Ñ\83Ñ\80еÑ\92аÑ\98Ñ\83 „%s“."
+msgstr "Ð\9dе могÑ\83 да пÑ\80имим довоÑ\99но баÑ\84еÑ\80а Ñ\81а Ñ\83Ñ\80еÑ\92аÑ\98а „%s“."
#, c-format
msgid "Failed to query norm on device '%s'."
-msgstr "Нисам успео да пропитам норму на уређају „%s“."
+msgstr ""
#, c-format
msgid "Failed getting controls attributes on device '%s'."
-msgstr "Нисам успео да добавим својства контрола на уређају „%s“."
+msgstr ""
-#, c-format
+#, fuzzy, c-format
msgid "Cannot identify device '%s'."
-msgstr "Не могу да распознам уређај „%s“."
+msgstr "Не могу да пишем на видео уређај „%s“."
#, c-format
msgid "This isn't a device '%s'."
-msgstr "Ово није уређај „%s“."
+msgstr ""
-#, c-format
+#, fuzzy, c-format
msgid "Could not open device '%s' for reading and writing."
-msgstr "Ð\9dе могÑ\83 да покÑ\80енем уређај „%s“ ради читања и уписа."
+msgstr "Ð\9dе могÑ\83 да оÑ\82воÑ\80им уређај „%s“ ради читања и уписа."
-#, c-format
+#, fuzzy, c-format
msgid "Device '%s' is not a capture device."
-msgstr "УÑ\80еÑ\92аÑ\98 â\80\9e%sâ\80\9c ниÑ\98е уређај за снимање."
+msgstr "УÑ\80еÑ\92аÑ\98 â\80\9e%sâ\80\9c не пÑ\80едÑ\81Ñ\82авÑ\99а уређај за снимање."
-#, c-format
+#, fuzzy, c-format
msgid "Device '%s' is not a output device."
-msgstr "УÑ\80еÑ\92аÑ\98 â\80\9e%sâ\80\9c ниÑ\98е излазни Ñ\83Ñ\80еÑ\92аÑ\98."
+msgstr "УÑ\80еÑ\92аÑ\98 â\80\9e%sâ\80\9c не пÑ\80едÑ\81Ñ\82авÑ\99а Ñ\83Ñ\80еÑ\92аÑ\98 за Ñ\81нимаÑ\9aе."
-#, c-format
+#, fuzzy, c-format
msgid "Failed to set norm for device '%s'."
-msgstr "Ð\9dиÑ\81ам Ñ\83Ñ\81пео да подеÑ\81им ноÑ\80мÑ\83 за Ñ\83Ñ\80еÑ\92аÑ\98 „%s“."
+msgstr "Ð\9dе могÑ\83 да пÑ\80имим баÑ\84еÑ\80е Ñ\81а Ñ\83Ñ\80еÑ\92аÑ\98а „%s“."
-#, c-format
+#, fuzzy, c-format
msgid "Failed to get current tuner frequency for device '%s'."
-msgstr "Ð\9dиÑ\81ам Ñ\83Ñ\81пео да добавим Ñ\82екÑ\83Ñ\9bÑ\83 Ñ\83Ñ\87еÑ\81Ñ\82аноÑ\81Ñ\82 Ñ\82Ñ\98Ñ\83неÑ\80а за Ñ\83Ñ\80еÑ\92аÑ\98 „%s“."
+msgstr "Ð\9dе могÑ\83 да пÑ\80имим довоÑ\99но баÑ\84еÑ\80а Ñ\81а Ñ\83Ñ\80еÑ\92аÑ\98а „%s“."
#, c-format
msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
msgstr ""
-"Нисам успео да подесим текућу учестаност тјунера за уређај „%s“ на %lu Hz."
#, c-format
msgid "Failed to get signal strength for device '%s'."
-msgstr "Нисам успео да добавим јачину сигнала за уређај „%s“."
+msgstr ""
-#, c-format
+#, fuzzy, c-format
msgid "Failed to get value for control %d on device '%s'."
-msgstr "Ð\9dиÑ\81ам Ñ\83Ñ\81пео да добавим вÑ\80едноÑ\81Ñ\82 за конÑ\82Ñ\80олÑ\83 %d на Ñ\83Ñ\80еÑ\92аÑ\98Ñ\83 „%s“."
+msgstr "Ð\9dе могÑ\83 да заÑ\82воÑ\80им Ñ\83пÑ\80авÑ\99аÑ\87ки Ñ\83Ñ\80еÑ\92аÑ\98 „%s“."
-#, c-format
+#, fuzzy, c-format
msgid "Failed to set value %d for control %d on device '%s'."
-msgstr "Ð\9dиÑ\81ам Ñ\83Ñ\81пео да подеÑ\81им вÑ\80едноÑ\81Ñ\82 %d за конÑ\82Ñ\80олÑ\83 %d на Ñ\83Ñ\80еÑ\92аÑ\98Ñ\83 „%s“."
+msgstr "Ð\9dе могÑ\83 да заÑ\82воÑ\80им Ñ\83пÑ\80авÑ\99аÑ\87ки Ñ\83Ñ\80еÑ\92аÑ\98 „%s“."
#, c-format
msgid "Failed to get current input on device '%s'. May be it is a radio device"
msgstr ""
-"Нисам успео да добавим текући улаз на уређају „%s“. Можда је то радио уређај."
-
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Нисам успео да подесим улаз %d на уређају %s."
-#, c-format
+#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
-msgstr ""
-"Нисам успео да добавим текући излаз на уређају „%s“. Можда је то радио "
-"уређај."
+msgstr "Не могу да примим довољно бафера са уређаја „%s“."
-#, c-format
+#, fuzzy, c-format
msgid "Failed to set output %d on device %s."
-msgstr "Нисам успео да подесим излаз %d на уређају %s."
-
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Не могу да додам помоћне меморије у уређају „%s“."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Нисам успео да добавим видео кадрове са уређаја „%s“."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Нисам успео након %d покушаја. уређај %s. системска грешка: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Не могу да добавим параметре на уређају „%s“"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Улазни видео уређај не прихвата нове поставке протока кадрова."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Не могу да мапирам помоћне меморије са уређаја „%s“"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Управљачки програм уређаја „%s“ не подржава ниједан познати начин снимања."
+msgstr "Не могу да отворим радио уређај '%s'"
msgid "Changing resolution at runtime is not yet supported."
-msgstr "Промена резолуције приликом извршавања још увек није подржана."
+msgstr ""
msgid "Cannot operate without a clock"
-msgstr "Не могу да радим без сата"
+msgstr ""
+
+#, fuzzy
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Не могу да примим бафере са уређаја „%s“."
#~ msgid "Could not open file \"%s\" for writing."
#~ msgstr "Не могу да отворим датотеку „%s“ ради уписа."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-01-09 19:36+0100\n"
"Last-Translator: Daniel Nylander <po@danielnylander.se>\n"
"Language-Team: Swedish <tp-sv@listor.tp-sv.se>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Kunde inte etablera en anslutning till ljudservern"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Misslyckades med att fråga efter ljudserverförmågor"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "\"%s\" av \"%s\""
-msgid "Internal data stream error."
-msgstr "Internt fel i dataström."
-
msgid "Failed to decode JPEG image"
msgstr "Misslyckades med att avkoda JPEG-bild"
+msgid "Internal data stream error."
+msgstr "Internt fel i dataström."
+
msgid "Could not connect to server"
msgstr "Kunde inte ansluta till servern"
msgstr "Fel vid läsning av %d byte från enheten \"%s\"."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+"Misslyckades med att räkna upp möjliga videoformat som enheten \"%s\" kan "
+"arbeta med"
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Kunde inte mappa buffertar från enheten \"%s\"."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "Drivrutinen för enheten \"%s\" saknar stöd för någon känd fångstmetod."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "Drivrutinen för enheten \"%s\" saknar stöd för någon känd fångstmetod."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Enheten \"%s\" är ingen utgångsenhet."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Enheten \"%s\" är ingen utgångsenhet."
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Enheten \"%s\" kan inte fånga i %dx%d"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Enheten \"%s\" kan inte fånga i det angivna formatet"
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Kunde inte få tag i parametrar på enheten \"%s\""
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "Videoingångsenheten accepterade inte ny inställning för bildfrekvens."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Fick oväntade bildrutstorleken %u istället för %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Fel vid läsning av %d byte på enheten \"%s\"."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Misslyckades med att få videobildrutor från enheten \"%s\"."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Misslyckades efter %d försök. enhet %s. systemfel: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr ""
+"Misslyckades med att få tag i inställningen för mottagare %d på enheten \"%s"
+"\"."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Fel vid läsning av %d byte från enheten \"%s\"."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Enheten \"%s\" är ingen utgångsenhet."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Kunde inte ställa in ingång %d på enheten %s."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Kunde inte ställa in ingång %d på enheten %s."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Misslyckades med att få tag på signalstyrka för enheten \"%s\"."
#, c-format
msgid ""
"radioenhet"
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Kunde inte ställa in ingång %d på enheten %s."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Misslyckades med att ställa in utgång %d på enheten %s."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Kunde inte kölägga buffertar i enheten \"%s\"."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Misslyckades med att få videobildrutor från enheten \"%s\"."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Misslyckades efter %d försök. enhet %s. systemfel: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Kunde inte få tag i parametrar på enheten \"%s\""
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Videoingångsenheten accepterade inte ny inställning för bildfrekvens."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Kunde inte mappa buffertar från enheten \"%s\"."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "Drivrutinen för enheten \"%s\" saknar stöd för någon känd fångstmetod."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Ändring av upplösning vid körtid stöds inte än."
msgid "Cannot operate without a clock"
msgstr "Kan inte fungera utan en klocka"
-#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
-#~ msgstr ""
-#~ "Misslyckades med att räkna upp möjliga videoformat som enheten \"%s\" kan "
-#~ "arbeta med"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Fel vid läsning av %d byte på enheten \"%s\"."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Kunde inte kölägga buffertar i enheten \"%s\"."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Kunde inte etablera en anslutning till ljudservern"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Misslyckades med att fråga efter ljudserverförmågor"
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ msgid "No free buffers found in the pool at index %d."
#~ msgstr "Inga lediga buffertar hittades i poolen på index %d."
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "Enheten \"%s\" kan inte fånga i %dx%d"
-
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "Enheten \"%s\" kan inte fånga i det angivna formatet"
-
#~ msgid "Could not get buffers from device '%s'."
#~ msgstr "Kunde inte få tag i buffertar från enheten \"%s\"."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-01-08 00:03+0200\n"
"Last-Translator: Server Acim <serveracim@gmail.com>\n"
"Language-Team: Turkish <gnu-tr-u12a@lists.sourceforge.net>\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Ses sunucusuyla bağlantı kurulumayor"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Ses sunucusu olanakları sorgulanamadı"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "'%s' tarafından '%s'"
-msgid "Internal data stream error."
-msgstr "İç veri akım hatası."
-
msgid "Failed to decode JPEG image"
msgstr "JPEG görüntüsünü çözümlenemedi"
+msgid "Internal data stream error."
+msgstr "İç veri akım hatası."
+
msgid "Could not connect to server"
msgstr "Sunucuya bağlanamıyor"
msgstr "%d bayt bilgili '%s' aygıtından okumada hata."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr "Aygıtın '%s' birlikte çalışabileceği vidyo kiplerini sıralamada hata"
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Aygıttan '%s' bellekler eşlenemiyor"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Bu aygıtın sürücüsü '%s' bilinen görüntü yakalama yöntemlerinden herhangi "
+"birisi desteklemiyor."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Bu aygıtın sürücüsü '%s' bilinen görüntü yakalama yöntemlerinden herhangi "
+"birisi desteklemiyor."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Aygıt '%s' bir çıkış aygıtı değil."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Aygıt '%s' bir çıkış aygıtı değil."
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Aygıt '%s' görüntü yakalayamadı%dx%d"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Aygıt '%s' belirtilen kipte görüntü yakalayamadı"
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Değiştirgeler aygıttan '%s' alınamıyor"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "Vidyo giriş aygıtı yeni çerçeve oranı ayarlarını kabul etmedi."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Beklenmeyen bir çerçevece boyutu %u bunun yerine %u görüntülendi."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "%d baytı şu aygıtta '%s' okumada hata."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Aygıttan device '%s' vidyo çerçeveleri alınamadı."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Şu denemeden sonra %d başarılamadı. aygıt %s. sistem hatası: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Radyo istasyonu ayarlarını bulma %d şu aygıtta '%s' başarılamadı."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "%d bayt bilgili '%s' aygıtından okumada hata."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Aygıt '%s' bir çıkış aygıtı değil."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Giriş değeri olarak bu %d şu aygıtta %s ayarlanamadı."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Giriş değeri olarak bu %d şu aygıtta %s ayarlanamadı."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Aygıt için '%s' güçlü sinyal alımı gerçekleşemedi."
#, c-format
msgid ""
"Şu aygıtta '%s' geçerli giriş elde edilemedi. O bir radyo aygıtı olabilir."
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Giriş değeri olarak bu %d şu aygıtta %s ayarlanamadı."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Çıkış değeri olarak %d şu aygıtta %s elde edilemedi."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Aygıtta '%s' arabellek kuyruğa sokulamıyor."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Aygıttan device '%s' vidyo çerçeveleri alınamadı."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Şu denemeden sonra %d başarılamadı. aygıt %s. sistem hatası: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Değiştirgeler aygıttan '%s' alınamıyor"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Vidyo giriş aygıtı yeni çerçeve oranı ayarlarını kabul etmedi."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Aygıttan '%s' bellekler eşlenemiyor"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Bu aygıtın sürücüsü '%s' bilinen görüntü yakalama yöntemlerinden herhangi "
-"birisi desteklemiyor."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Çalışırken çözünürlüğü değiştirmek henüz desteklenmiyor."
msgid "Cannot operate without a clock"
msgstr "Saat olmadan çalışamaz"
-#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
-#~ msgstr ""
-#~ "Aygıtın '%s' birlikte çalışabileceği vidyo kiplerini sıralamada hata"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "%d baytı şu aygıtta '%s' okumada hata."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Aygıtta '%s' arabellek kuyruğa sokulamıyor."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Ses sunucusuyla bağlantı kurulumayor"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Ses sunucusu olanakları sorgulanamadı"
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ msgid "No free buffers found in the pool at index %d."
#~ msgstr "Dizindeki %d havuzda boş bellek bulunamadı."
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "Aygıt '%s' görüntü yakalayamadı%dx%d"
-
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "Aygıt '%s' belirtilen kipte görüntü yakalayamadı"
-
#~ msgid "Could not get buffers from device '%s'."
#~ msgstr "Arabellek aygıttan '%s' alınamıyor."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2011-05-01 11:55+0300\n"
"Last-Translator: Yuri Chornoivan <yurchor@ukr.net>\n"
"Language-Team: Ukrainian <translation-team-uk@lists.sourceforge.net>\n"
"X-Generator: Lokalize 1.2\n"
"Plural-Forms: nplurals=1; plural=0;\n"
-msgid "Could not establish connection to sound server"
-msgstr "Не вдалося встановити з'єднання із звуковим сервером."
-
-msgid "Failed to query sound server capabilities"
-msgstr "Помилка при запиті можливостей звукового сервера"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "«%s» виконує %s"
-msgid "Internal data stream error."
-msgstr "Внутрішня помилка потоку даних."
-
msgid "Failed to decode JPEG image"
msgstr "Помилка при декодуванні зображення JPEG"
+msgid "Internal data stream error."
+msgstr "Внутрішня помилка потоку даних."
+
msgid "Could not connect to server"
msgstr "Не вдалося з'єднатись з сервером"
msgstr "Помилка під час спроби читання %d байтів з пристрою «%s»."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+"Не вдається отримати список усіх можливих відеоформатів, які підтримує "
+"пристрій \"%s\""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Не вдалося пов’язати буфери з пристрою «%s»."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "Драйвер пристрою «%s» не підтримує жоден відомий метод захоплення."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "Драйвер пристрою «%s» не підтримує жоден відомий метод захоплення."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Пристрій «%s» не є пристроєм виведення даних."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Пристрій «%s» не є пристроєм виведення даних."
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Пристрій \"%s\" не здатний захоплювати відео у форматі %dx%d"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Пристрій \"%s\" не здатний захоплювати відео у вказаному форматі"
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Не вдалося отримати параметри пристрою «%s»."
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+"Пристроєм відеовходу не підтримується нове значення параметра частоти кадрів."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Отримано неочікуваний розмір блоку %u замість %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Помилка під час спроби читання %d байтів на пристрої «%s»."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Не вдалося отримати відеокадри з пристрою «%s»."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Помилка після %d спроб. Пристрій %s. Системна помилка: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Помилка при встановленні приймача %d пристрою «%s»."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Помилка під час спроби читання %d байтів з пристрою «%s»."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Пристрій «%s» не є пристроєм виведення даних."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Не вдалося встановити ввід %d пристрою «%s»."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Не вдалося встановити ввід %d пристрою «%s»."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Помилка при отриманні сили сигналу пристрою «%s»."
#, c-format
msgid ""
"радіо."
#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Не вдалося встановити ввід %d пристрою «%s»."
-
-#, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgstr ""
msgid "Failed to set output %d on device %s."
msgstr "Не вдалося встановити вихід %d пристрою «%s»."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Не вдалося опитати буфери від пристрою «%s»."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Не вдалося отримати відеокадри з пристрою «%s»."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Помилка після %d спроб. Пристрій %s. Системна помилка: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Не вдалося отримати параметри пристрою «%s»."
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr ""
-"Пристроєм відеовходу не підтримується нове значення параметра частоти кадрів."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Не вдалося пов’язати буфери з пристрою «%s»."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "Драйвер пристрою «%s» не підтримує жоден відомий метод захоплення."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Зміна роздільної здатності при відтворенні ще не підтримується."
msgid "Cannot operate without a clock"
msgstr "Робота без годинника неможлива"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Помилка під час спроби читання %d байтів на пристрої «%s»."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Не вдалося опитати буфери від пристрою «%s»."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Не вдалося встановити з'єднання із звуковим сервером."
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Помилка при запиті можливостей звукового сервера"
+
#~ msgid "Could not read from CD."
#~ msgstr "Не вдається прочитати з компакт-диску."
#~ msgid "Failed getting controls attributes on device '%s.'"
#~ msgstr "Не вдається отримати атрибути органів керування пристрою \"%s\"."
-#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
-#~ msgstr ""
-#~ "Не вдається отримати список усіх можливих відеоформатів, які підтримує "
-#~ "пристрій \"%s\""
-
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ "buffers have been allocated yet, or the userptr or length are invalid. "
#~ msgid "Could not exchange data with device '%s'."
#~ msgstr "Помилка при обміні даними з пристроєм \"%s\"."
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "Пристрій \"%s\" не здатний захоплювати відео у форматі %dx%d"
-
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "Пристрій \"%s\" не здатний захоплювати відео у вказаному форматі"
-
#~ msgid "Could not set parameters on device '%s'"
#~ msgstr "Не вдається встановити параметри пристрою \"%s\"."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.23.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2010-10-03 18:48+1030\n"
"Last-Translator: Clytie Siddall <clytie@riverland.net.au>\n"
"Language-Team: Vietnamese <vi-VN@googlegroups.com>\n"
"Plural-Forms: nplurals=1; plural=0;\n"
"X-Generator: LocFactoryEditor 1.8\n"
-msgid "Could not establish connection to sound server"
-msgstr "Không thể thiết lập sự kết nối tới máy phục vụ âm thanh"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Lỗi truy vấn khả năng của máy phục vụ âm thanh"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "« %s » bởi « %s »"
-msgid "Internal data stream error."
-msgstr "Lỗi luồng dữ liệu nội bộ."
-
msgid "Failed to decode JPEG image"
msgstr "Lỗi giải mã ảnh JPEG"
+msgid "Internal data stream error."
+msgstr "Lỗi luồng dữ liệu nội bộ."
+
msgid "Could not connect to server"
msgstr "Không thể kết nối tới máy phục vụ"
msgstr "Gặp lỗi khi đọc %d byte từ thiết bị « %s »."
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr ""
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "Không thể ánh xạ các bộ đệm từ thiết bị « %s »."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr ""
+"Trình điều khiển của thiết bị « %s » không hỗ trợ phương pháp bắt đã biết "
+"nào."
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr ""
+"Trình điều khiển của thiết bị « %s » không hỗ trợ phương pháp bắt đã biết "
+"nào."
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "Thiết bị « %s » không phải là thiết bị xuất ra."
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "Thiết bị « %s » không phải là thiết bị xuất ra."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "Thiết bị « %s » không phải là thiết bị bắt gì."
+
+#, fuzzy, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "Thiết bị « %s » không phải là thiết bị bắt gì."
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "Không thể lấy các tham số về thiết bị « %s »."
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "Thiết bị nhập ảnh động vào không chấp nhận thiết lập tốc độ khung mới."
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "Nhận được kích cỡ khung bất thường %u, thay cho %u."
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "Gặp lỗi khi đọc %d byte trên thiết bị « %s »."
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "Lỗi khi thử lấy các khung ảnh động từ thiết bị « %s »."
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "Lỗi sau %d lần thử. Thiết bị %s. Lỗi hệ thống: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "Lỗi lấy thiết lập của thiết bị điều hưởng %d trên thiết bị « %s »."
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "Gặp lỗi khi đọc %d byte từ thiết bị « %s »."
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "Thiết bị « %s » không phải là thiết bị xuất ra."
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "Lỗi đặt dữ liệu nhập %d vào thiết bị « %s »."
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "Lỗi đặt dữ liệu nhập %d vào thiết bị « %s »."
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "Lỗi lấy biên độ tín hiệu cho thiết bị « %s »."
#, c-format
msgid ""
msgstr ""
"Lỗi lấy kết nhập hiện thời vào thiết bị « %s ». Có thể là thiết bị thu thanh."
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "Lỗi đặt dữ liệu nhập %d vào thiết bị « %s »."
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "Lỗi đặt dữ liệu nhập %d vào thiết bị « %s »."
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "Không thể phụ thêm các bộ đệm vào hàng đợi trên thiết bị « %s »."
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "Lỗi khi thử lấy các khung ảnh động từ thiết bị « %s »."
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "Lỗi sau %d lần thử. Thiết bị %s. Lỗi hệ thống: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "Không thể lấy các tham số về thiết bị « %s »."
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "Thiết bị nhập ảnh động vào không chấp nhận thiết lập tốc độ khung mới."
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "Không thể ánh xạ các bộ đệm từ thiết bị « %s »."
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr ""
-"Trình điều khiển của thiết bị « %s » không hỗ trợ phương pháp bắt đã biết "
-"nào."
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "Chưa hỗ trợ khả năng thay đổi độ phân giải trong khi chạy."
msgid "Cannot operate without a clock"
msgstr "Không thể thao tác khi không có đồng hồ."
+
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "Gặp lỗi khi đọc %d byte trên thiết bị « %s »."
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "Không thể phụ thêm các bộ đệm vào hàng đợi trên thiết bị « %s »."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Không thể thiết lập sự kết nối tới máy phục vụ âm thanh"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Lỗi truy vấn khả năng của máy phục vụ âm thanh"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.16.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2009-11-13 22:20+0800\n"
"Last-Translator: Ji ZhengYu <zhengyuji@gmail.com>\n"
"Language-Team: Chinese (simplified) <i18n-zh@googlegroups.com>\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "无法建立与音频服务器的连接"
-
-msgid "Failed to query sound server capabilities"
-msgstr "查寻音频服务器的服务失败"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr "‘%s’ 由 ‘%s’"
-msgid "Internal data stream error."
-msgstr "内部数据流错误。"
-
msgid "Failed to decode JPEG image"
msgstr "解码 JPEG 图像出错"
+msgid "Internal data stream error."
+msgstr "内部数据流错误。"
+
msgid "Could not connect to server"
msgstr "无法连接至服务器"
msgstr "从设备‘%2$s’中读取 %1$d 个字节时出错。"
#, c-format
+msgid "Failed to enumerate possible video formats device '%s' can work with"
+msgstr "枚举设备‘%s’可能支持的视频格式时出错"
+
+#, c-format
+msgid "Could not map buffers from device '%s'"
+msgstr "无法从设备‘%s’中映射出缓冲区"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support the IO method %d"
+msgstr "设备‘%s’的驱动不支持任何已知的捕获方式。"
+
+#, fuzzy, c-format
+msgid "The driver of device '%s' does not support any known IO method."
+msgstr "设备‘%s’的驱动不支持任何已知的捕获方式。"
+
+#, fuzzy, c-format
+msgid "Device '%s' does not support video capture"
+msgstr "‘%s’不是输出设备。"
+
+#, fuzzy, c-format
+msgid "Device '%s' is busy"
+msgstr "‘%s’不是输出设备。"
+
+#, c-format
+msgid "Device '%s' cannot capture at %dx%d"
+msgstr "设备‘%s’不能在 %dx%d 处捕获"
+
+#, c-format
+msgid "Device '%s' cannot capture in the specified format"
+msgstr "设备‘%s’无法以指定格式捕获"
+
+#, c-format
+msgid "Could not get parameters on device '%s'"
+msgstr "无法获取设备‘%s’的参数"
+
+#, fuzzy
+msgid "Video device did not accept new frame rate setting."
+msgstr "视频输入设备不接受新的帧率设置。"
+
+msgid "Video device could not create buffer pool."
+msgstr ""
+
+#, c-format
msgid "Got unexpected frame size of %u instead of %u."
msgstr "取得了 %u 的不需要的帧大小,而不是 %u。"
#, c-format
-msgid "Error reading %d bytes on device '%s'."
-msgstr "读取设备‘%2$s’中的 %1$d 字节时出错。"
+msgid "Failed trying to get video frames from device '%s'."
+msgstr "从设备‘%s’上获取视频的尝试失败了。"
+
+#, c-format
+msgid "Failed after %d tries. device %s. system error: %s"
+msgstr "在 %d 次尝试后失败。设备 %s。系统错误: %s"
+
+#, fuzzy, c-format
+msgid "Failed to get settings of tuner %d on device '%s'."
+msgstr "获取设备 %2$s 上的微调钮 %1$d 的设置时出错"
+
+#, fuzzy, c-format
+msgid "Error getting capabilities for device '%s'."
+msgstr "从设备‘%2$s’中读取 %1$d 个字节时出错。"
+
+#, fuzzy, c-format
+msgid "Device '%s' is not a tuner."
+msgstr "‘%s’不是输出设备。"
+
+#, fuzzy, c-format
+msgid "Failed to get radio input on device '%s'. "
+msgstr "设置设备 %2$s 上的输入 %1$d 时出错。"
+
+#, c-format
+msgid "Failed to set input %d on device %s."
+msgstr "设置设备 %2$s 上的输入 %1$d 时出错。"
+
+#, fuzzy, c-format
+msgid "Failed to change mute state for device '%s'."
+msgstr "获取设备‘%s’的信号长度时出错。"
#, c-format
msgid ""
msgid "Failed to get current input on device '%s'. May be it is a radio device"
msgstr "获取设备‘%s’上的当前输入出错。也许它是一个广播设备"
-#, c-format
-msgid "Failed to set input %d on device %s."
-msgstr "设置设备 %2$s 上的输入 %1$d 时出错。"
-
#, fuzzy, c-format
msgid ""
"Failed to get current output on device '%s'. May be it is a radio device"
msgid "Failed to set output %d on device %s."
msgstr "设置设备 %2$s 上的输入 %1$d 时出错。"
-#, c-format
-msgid "Could not enqueue buffers in device '%s'."
-msgstr "无法对设备‘%s’中的缓冲区进行排序。"
-
-#, c-format
-msgid "Failed trying to get video frames from device '%s'."
-msgstr "从设备‘%s’上获取视频的尝试失败了。"
-
-#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
-msgstr "在 %d 次尝试后失败。设备 %s。系统错误: %s"
-
-#, c-format
-msgid "Could not get parameters on device '%s'"
-msgstr "无法获取设备‘%s’的参数"
-
-msgid "Video input device did not accept new frame rate setting."
-msgstr "视频输入设备不接受新的帧率设置。"
-
-#, c-format
-msgid "Could not map buffers from device '%s'"
-msgstr "无法从设备‘%s’中映射出缓冲区"
-
-#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
-msgstr "设备‘%s’的驱动不支持任何已知的捕获方式。"
-
msgid "Changing resolution at runtime is not yet supported."
msgstr "尚不支持在运行时更改分辨率。"
msgid "Cannot operate without a clock"
msgstr "没有时钟的话无法操作"
-#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
-#~ msgstr "枚举设备‘%s’可能支持的视频格式时出错"
+#~ msgid "Error reading %d bytes on device '%s'."
+#~ msgstr "读取设备‘%2$s’中的 %1$d 字节时出错。"
+
+#~ msgid "Could not enqueue buffers in device '%s'."
+#~ msgstr "无法对设备‘%s’中的缓冲区进行排序。"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "无法建立与音频服务器的连接"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "查寻音频服务器的服务失败"
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ msgid "No free buffers found in the pool at index %d."
#~ msgstr "在内存池中 %d 处没有可分配的缓冲区。"
-#~ msgid "Device '%s' cannot capture at %dx%d"
-#~ msgstr "设备‘%s’不能在 %dx%d 处捕获"
-
-#~ msgid "Device '%s' cannot capture in the specified format"
-#~ msgstr "设备‘%s’无法以指定格式捕获"
-
#~ msgid "Could not get buffers from device '%s'."
#~ msgstr "无法从设备‘%s’中获取缓冲区。"
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.2 0.10.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2006-08-29 01:08+0800\n"
"Last-Translator: Abel Cheung <abelcheung@gmail.com>\n"
"Language-Team: Chinese (Hong Kong) <community@linuxhall.org>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
-msgid "Internal data stream error."
-msgstr "內部資料串流發生錯誤。"
-
msgid "Failed to decode JPEG image"
msgstr ""
+msgid "Internal data stream error."
+msgstr "內部資料串流發生錯誤。"
+
msgid "Could not connect to server"
msgstr ""
msgstr ""
#, c-format
-msgid "Got unexpected frame size of %u instead of %u."
+msgid "Failed to enumerate possible video formats device '%s' can work with"
msgstr ""
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Could not map buffers from device '%s'"
msgstr ""
#, c-format
-msgid ""
-"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
-"it is a v4l1 driver."
+msgid "The driver of device '%s' does not support the IO method %d"
msgstr ""
#, c-format
-msgid "Failed to query attributes of input %d in device %s"
+msgid "The driver of device '%s' does not support any known IO method."
msgstr ""
#, c-format
-msgid "Failed to get setting of tuner %d on device '%s'."
+msgid "Device '%s' does not support video capture"
msgstr ""
#, c-format
-msgid "Failed to query norm on device '%s'."
+msgid "Device '%s' is busy"
msgstr ""
#, c-format
-msgid "Failed getting controls attributes on device '%s'."
+msgid "Device '%s' cannot capture at %dx%d"
msgstr ""
#, c-format
-msgid "Cannot identify device '%s'."
+msgid "Device '%s' cannot capture in the specified format"
msgstr ""
#, c-format
-msgid "This isn't a device '%s'."
+msgid "Could not get parameters on device '%s'"
+msgstr ""
+
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+
+msgid "Video device could not create buffer pool."
msgstr ""
#, c-format
-msgid "Could not open device '%s' for reading and writing."
+msgid "Got unexpected frame size of %u instead of %u."
msgstr ""
#, c-format
-msgid "Device '%s' is not a capture device."
+msgid "Failed trying to get video frames from device '%s'."
msgstr ""
#, c-format
-msgid "Device '%s' is not a output device."
+msgid "Failed after %d tries. device %s. system error: %s"
msgstr ""
#, c-format
-msgid "Failed to set norm for device '%s'."
+msgid "Failed to get settings of tuner %d on device '%s'."
msgstr ""
#, c-format
-msgid "Failed to get current tuner frequency for device '%s'."
+msgid "Error getting capabilities for device '%s'."
msgstr ""
#, c-format
-msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
+msgid "Device '%s' is not a tuner."
msgstr ""
#, c-format
-msgid "Failed to get signal strength for device '%s'."
+msgid "Failed to get radio input on device '%s'. "
msgstr ""
#, c-format
-msgid "Failed to get value for control %d on device '%s'."
+msgid "Failed to set input %d on device %s."
msgstr ""
#, c-format
-msgid "Failed to set value %d for control %d on device '%s'."
+msgid "Failed to change mute state for device '%s'."
msgstr ""
#, c-format
-msgid "Failed to get current input on device '%s'. May be it is a radio device"
+msgid ""
+"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
+"it is a v4l1 driver."
msgstr ""
#, c-format
-msgid "Failed to set input %d on device %s."
+msgid "Failed to query attributes of input %d in device %s"
msgstr ""
#, c-format
-msgid ""
-"Failed to get current output on device '%s'. May be it is a radio device"
+msgid "Failed to get setting of tuner %d on device '%s'."
msgstr ""
#, c-format
-msgid "Failed to set output %d on device %s."
+msgid "Failed to query norm on device '%s'."
msgstr ""
#, c-format
-msgid "Could not enqueue buffers in device '%s'."
+msgid "Failed getting controls attributes on device '%s'."
msgstr ""
#, c-format
-msgid "Failed trying to get video frames from device '%s'."
+msgid "Cannot identify device '%s'."
msgstr ""
#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
+msgid "This isn't a device '%s'."
msgstr ""
#, c-format
-msgid "Could not get parameters on device '%s'"
+msgid "Could not open device '%s' for reading and writing."
msgstr ""
-msgid "Video input device did not accept new frame rate setting."
+#, c-format
+msgid "Device '%s' is not a capture device."
msgstr ""
#, c-format
-msgid "Could not map buffers from device '%s'"
+msgid "Device '%s' is not a output device."
+msgstr ""
+
+#, c-format
+msgid "Failed to set norm for device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to get current tuner frequency for device '%s'."
msgstr ""
#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
+msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
+msgstr ""
+
+#, c-format
+msgid "Failed to get signal strength for device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to get value for control %d on device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to set value %d for control %d on device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to get current input on device '%s'. May be it is a radio device"
+msgstr ""
+
+#, c-format
+msgid ""
+"Failed to get current output on device '%s'. May be it is a radio device"
+msgstr ""
+
+#, c-format
+msgid "Failed to set output %d on device %s."
msgstr ""
msgid "Changing resolution at runtime is not yet supported."
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.2 0.10.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-11-26 13:52+0000\n"
"PO-Revision-Date: 2006-08-29 01:08+0800\n"
"Last-Translator: Abel Cheung <abelcheung@gmail.com>\n"
"Language-Team: Chinese (traditional) <zh-l10n@linux.org.tw>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
-msgid "Internal data stream error."
-msgstr "內部資料串流發生錯誤。"
-
msgid "Failed to decode JPEG image"
msgstr ""
+msgid "Internal data stream error."
+msgstr "內部資料串流發生錯誤。"
+
msgid "Could not connect to server"
msgstr ""
msgstr ""
#, c-format
-msgid "Got unexpected frame size of %u instead of %u."
+msgid "Failed to enumerate possible video formats device '%s' can work with"
msgstr ""
#, c-format
-msgid "Error reading %d bytes on device '%s'."
+msgid "Could not map buffers from device '%s'"
msgstr ""
#, c-format
-msgid ""
-"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
-"it is a v4l1 driver."
+msgid "The driver of device '%s' does not support the IO method %d"
msgstr ""
#, c-format
-msgid "Failed to query attributes of input %d in device %s"
+msgid "The driver of device '%s' does not support any known IO method."
msgstr ""
#, c-format
-msgid "Failed to get setting of tuner %d on device '%s'."
+msgid "Device '%s' does not support video capture"
msgstr ""
#, c-format
-msgid "Failed to query norm on device '%s'."
+msgid "Device '%s' is busy"
msgstr ""
#, c-format
-msgid "Failed getting controls attributes on device '%s'."
+msgid "Device '%s' cannot capture at %dx%d"
msgstr ""
#, c-format
-msgid "Cannot identify device '%s'."
+msgid "Device '%s' cannot capture in the specified format"
msgstr ""
#, c-format
-msgid "This isn't a device '%s'."
+msgid "Could not get parameters on device '%s'"
+msgstr ""
+
+msgid "Video device did not accept new frame rate setting."
+msgstr ""
+
+msgid "Video device could not create buffer pool."
msgstr ""
#, c-format
-msgid "Could not open device '%s' for reading and writing."
+msgid "Got unexpected frame size of %u instead of %u."
msgstr ""
#, c-format
-msgid "Device '%s' is not a capture device."
+msgid "Failed trying to get video frames from device '%s'."
msgstr ""
#, c-format
-msgid "Device '%s' is not a output device."
+msgid "Failed after %d tries. device %s. system error: %s"
msgstr ""
#, c-format
-msgid "Failed to set norm for device '%s'."
+msgid "Failed to get settings of tuner %d on device '%s'."
msgstr ""
#, c-format
-msgid "Failed to get current tuner frequency for device '%s'."
+msgid "Error getting capabilities for device '%s'."
msgstr ""
#, c-format
-msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
+msgid "Device '%s' is not a tuner."
msgstr ""
#, c-format
-msgid "Failed to get signal strength for device '%s'."
+msgid "Failed to get radio input on device '%s'. "
msgstr ""
#, c-format
-msgid "Failed to get value for control %d on device '%s'."
+msgid "Failed to set input %d on device %s."
msgstr ""
#, c-format
-msgid "Failed to set value %d for control %d on device '%s'."
+msgid "Failed to change mute state for device '%s'."
msgstr ""
#, c-format
-msgid "Failed to get current input on device '%s'. May be it is a radio device"
+msgid ""
+"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
+"it is a v4l1 driver."
msgstr ""
#, c-format
-msgid "Failed to set input %d on device %s."
+msgid "Failed to query attributes of input %d in device %s"
msgstr ""
#, c-format
-msgid ""
-"Failed to get current output on device '%s'. May be it is a radio device"
+msgid "Failed to get setting of tuner %d on device '%s'."
msgstr ""
#, c-format
-msgid "Failed to set output %d on device %s."
+msgid "Failed to query norm on device '%s'."
msgstr ""
#, c-format
-msgid "Could not enqueue buffers in device '%s'."
+msgid "Failed getting controls attributes on device '%s'."
msgstr ""
#, c-format
-msgid "Failed trying to get video frames from device '%s'."
+msgid "Cannot identify device '%s'."
msgstr ""
#, c-format
-msgid "Failed after %d tries. device %s. system error: %s"
+msgid "This isn't a device '%s'."
msgstr ""
#, c-format
-msgid "Could not get parameters on device '%s'"
+msgid "Could not open device '%s' for reading and writing."
msgstr ""
-msgid "Video input device did not accept new frame rate setting."
+#, c-format
+msgid "Device '%s' is not a capture device."
msgstr ""
#, c-format
-msgid "Could not map buffers from device '%s'"
+msgid "Device '%s' is not a output device."
+msgstr ""
+
+#, c-format
+msgid "Failed to set norm for device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to get current tuner frequency for device '%s'."
msgstr ""
#, c-format
-msgid "The driver of device '%s' does not support any known capture method."
+msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
+msgstr ""
+
+#, c-format
+msgid "Failed to get signal strength for device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to get value for control %d on device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to set value %d for control %d on device '%s'."
+msgstr ""
+
+#, c-format
+msgid "Failed to get current input on device '%s'. May be it is a radio device"
+msgstr ""
+
+#, c-format
+msgid ""
+"Failed to get current output on device '%s'. May be it is a radio device"
+msgstr ""
+
+#, c-format
+msgid "Failed to set output %d on device %s."
msgstr ""
msgid "Changing resolution at runtime is not yet supported."
#include "config.h"
#endif
+#include <gst/base/gstbasesink.h>
+#include <gst/audio/streamvolume.h>
#include "gstdirectsoundsink.h"
#include <math.h>
#endif
#endif
+#define DEFAULT_MUTE FALSE
+
GST_DEBUG_CATEGORY_STATIC (directsoundsink_debug);
#define GST_CAT_DEFAULT directsoundsink_debug
-static void gst_directsound_sink_finalise (GObject * object);
+static void gst_directsound_sink_finalize (GObject * object);
static void gst_directsound_sink_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_directsound_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstCaps *gst_directsound_sink_getcaps (GstBaseSink * bsink);
+static GstCaps *gst_directsound_sink_getcaps (GstBaseSink * bsink,
+ GstCaps * filter);
static gboolean gst_directsound_sink_prepare (GstAudioSink * asink,
- GstRingBufferSpec * spec);
+ GstAudioRingBufferSpec * spec);
static gboolean gst_directsound_sink_unprepare (GstAudioSink * asink);
-
static gboolean gst_directsound_sink_open (GstAudioSink * asink);
static gboolean gst_directsound_sink_close (GstAudioSink * asink);
-static guint gst_directsound_sink_write (GstAudioSink * asink, gpointer data,
- guint length);
+static gint gst_directsound_sink_write (GstAudioSink * asink,
+ gpointer data, guint length);
static guint gst_directsound_sink_delay (GstAudioSink * asink);
static void gst_directsound_sink_reset (GstAudioSink * asink);
static GstCaps *gst_directsound_probe_supported_formats (GstDirectSoundSink *
dsoundsink, const GstCaps * template_caps);
-/* interfaces */
-static void gst_directsound_sink_interfaces_init (GType type);
-static void
-gst_directsound_sink_implements_interface_init (GstImplementsInterfaceClass *
- iface);
-static void gst_directsound_sink_mixer_interface_init (GstMixerClass * iface);
+static void gst_directsound_sink_set_volume (GstDirectSoundSink * sink,
+ gdouble volume, gboolean store);
+static gdouble gst_directsound_sink_get_volume (GstDirectSoundSink * sink);
+static void gst_directsound_sink_set_mute (GstDirectSoundSink * sink,
+ gboolean mute);
+static gboolean gst_directsound_sink_get_mute (GstDirectSoundSink * sink);
static GstStaticPadTemplate directsoundsink_sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) S16LE, "
+ "layout = (string) interleaved, "
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]; "
- "audio/x-raw-int, "
- "signed = (boolean) FALSE, "
- "width = (int) 8, "
- "depth = (int) 8, "
+ "audio/x-raw, "
+ "format = (string) S8, "
+ "layout = (string) interleaved, "
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ];"
"audio/x-iec958"));
enum
{
PROP_0,
- PROP_VOLUME
+ PROP_VOLUME,
+ PROP_MUTE
};
-GST_BOILERPLATE_FULL (GstDirectSoundSink, gst_directsound_sink, GstAudioSink,
- GST_TYPE_AUDIO_SINK, gst_directsound_sink_interfaces_init);
-
-/* interfaces stuff */
-static void
-gst_directsound_sink_interfaces_init (GType type)
-{
- static const GInterfaceInfo implements_interface_info = {
- (GInterfaceInitFunc) gst_directsound_sink_implements_interface_init,
- NULL,
- NULL,
- };
-
- static const GInterfaceInfo mixer_interface_info = {
- (GInterfaceInitFunc) gst_directsound_sink_mixer_interface_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (type,
- GST_TYPE_IMPLEMENTS_INTERFACE, &implements_interface_info);
- g_type_add_interface_static (type, GST_TYPE_MIXER, &mixer_interface_info);
-}
-
-static gboolean
-gst_directsound_sink_interface_supported (GstImplementsInterface * iface,
- GType iface_type)
-{
- g_return_val_if_fail (iface_type == GST_TYPE_MIXER, FALSE);
-
- /* for the sake of this example, we'll always support it. However, normally,
- * you would check whether the device you've opened supports mixers. */
- return TRUE;
-}
-
-static void
-gst_directsound_sink_implements_interface_init (GstImplementsInterfaceClass *
- iface)
-{
- iface->supported = gst_directsound_sink_interface_supported;
-}
-
-/*
- * This function returns the list of support tracks (inputs, outputs)
- * on this element instance. Elements usually build this list during
- * _init () or when going from NULL to READY.
- */
-
-static const GList *
-gst_directsound_sink_mixer_list_tracks (GstMixer * mixer)
-{
- GstDirectSoundSink *dsoundsink = GST_DIRECTSOUND_SINK (mixer);
-
- return dsoundsink->tracks;
-}
-
-static void
-gst_directsound_sink_set_volume (GstDirectSoundSink * dsoundsink)
-{
- if (dsoundsink->pDSBSecondary) {
- /* DirectSound controls volume using units of 100th of a decibel,
- * ranging from -10000 to 0. We use a linear scale of 0 - 100
- * here, so remap.
- */
- long dsVolume;
- if (dsoundsink->volume == 0)
- dsVolume = -10000;
- else
- dsVolume = 100 * (long) (20 * log10 ((double) dsoundsink->volume / 100.));
- dsVolume = CLAMP (dsVolume, -10000, 0);
-
- GST_DEBUG_OBJECT (dsoundsink,
- "Setting volume on secondary buffer to %d from %d", (int) dsVolume,
- (int) dsoundsink->volume);
- IDirectSoundBuffer_SetVolume (dsoundsink->pDSBSecondary, dsVolume);
- }
-}
-
-/*
- * Set volume. volumes is an array of size track->num_channels, and
- * each value in the array gives the wanted volume for one channel
- * on the track.
- */
-
-static void
-gst_directsound_sink_mixer_set_volume (GstMixer * mixer,
- GstMixerTrack * track, gint * volumes)
-{
- GstDirectSoundSink *dsoundsink = GST_DIRECTSOUND_SINK (mixer);
-
- if (volumes[0] != dsoundsink->volume) {
- dsoundsink->volume = volumes[0];
-
- gst_directsound_sink_set_volume (dsoundsink);
- }
-}
-
-static void
-gst_directsound_sink_mixer_get_volume (GstMixer * mixer,
- GstMixerTrack * track, gint * volumes)
-{
- GstDirectSoundSink *dsoundsink = GST_DIRECTSOUND_SINK (mixer);
-
- volumes[0] = dsoundsink->volume;
-}
+#define gst_directsound_sink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstDirectSoundSink, gst_directsound_sink,
+ GST_TYPE_AUDIO_SINK, G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL)
+ );
static void
-gst_directsound_sink_mixer_interface_init (GstMixerClass * iface)
+gst_directsound_sink_finalize (GObject * object)
{
- /* the mixer interface requires a definition of the mixer type:
- * hardware or software? */
- GST_MIXER_TYPE (iface) = GST_MIXER_SOFTWARE;
-
- /* virtual function pointers */
- iface->list_tracks = gst_directsound_sink_mixer_list_tracks;
- iface->set_volume = gst_directsound_sink_mixer_set_volume;
- iface->get_volume = gst_directsound_sink_mixer_get_volume;
-}
-
-static void
-gst_directsound_sink_finalise (GObject * object)
-{
- GstDirectSoundSink *dsoundsink = GST_DIRECTSOUND_SINK (object);
-
- g_mutex_free (dsoundsink->dsound_lock);
-
- if (dsoundsink->tracks) {
- g_list_foreach (dsoundsink->tracks, (GFunc) g_object_unref, NULL);
- g_list_free (dsoundsink->tracks);
- dsoundsink->tracks = NULL;
- }
-
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
-gst_directsound_sink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Direct Sound Audio Sink", "Sink/Audio",
- "Output to a sound card via Direct Sound",
- "Sebastien Moutte <sebastien@moutte.net>");
- gst_element_class_add_static_pad_template (element_class,
- &directsoundsink_sink_factory);
-}
-
-static void
gst_directsound_sink_class_init (GstDirectSoundSinkClass * klass)
{
- GObjectClass *gobject_class;
- GstElementClass *gstelement_class;
- GstBaseSinkClass *gstbasesink_class;
- GstBaseAudioSinkClass *gstbaseaudiosink_class;
- GstAudioSinkClass *gstaudiosink_class;
-
- gobject_class = (GObjectClass *) klass;
- gstelement_class = (GstElementClass *) klass;
- gstbasesink_class = (GstBaseSinkClass *) klass;
- gstbaseaudiosink_class = (GstBaseAudioSinkClass *) klass;
- gstaudiosink_class = (GstAudioSinkClass *) klass;
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstBaseSinkClass *gstbasesink_class = GST_BASE_SINK_CLASS (klass);
+ GstAudioSinkClass *gstaudiosink_class = GST_AUDIO_SINK_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GST_DEBUG_CATEGORY_INIT (directsoundsink_debug, "directsoundsink", 0,
"DirectSound sink");
parent_class = g_type_class_peek_parent (klass);
- gobject_class->finalize = gst_directsound_sink_finalise;
+ gobject_class->finalize = gst_directsound_sink_finalize;
gobject_class->set_property = gst_directsound_sink_set_property;
gobject_class->get_property = gst_directsound_sink_get_property;
g_param_spec_double ("volume", "Volume",
"Volume of this stream", 0.0, 1.0, 1.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_MUTE,
+ g_param_spec_boolean ("mute", "Mute",
+ "Mute state of this stream", DEFAULT_MUTE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_details_simple (element_class,
+ "Direct Sound Audio Sink", "Sink/Audio",
+ "Output to a sound card via Direct Sound",
+ "Sebastien Moutte <sebastien@moutte.net>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&directsoundsink_sink_factory));
}
static void
-gst_directsound_sink_init (GstDirectSoundSink * dsoundsink,
- GstDirectSoundSinkClass * g_class)
+gst_directsound_sink_init (GstDirectSoundSink * dsoundsink)
{
- GstMixerTrack *track = NULL;
-
- dsoundsink->tracks = NULL;
- track = g_object_new (GST_TYPE_MIXER_TRACK, NULL);
- track->label = g_strdup ("DSoundTrack");
- track->num_channels = 2;
- track->min_volume = 0;
- track->max_volume = 100;
- track->flags = GST_MIXER_TRACK_OUTPUT;
- dsoundsink->tracks = g_list_append (dsoundsink->tracks, track);
-
+ dsoundsink->volume = 100;
+ dsoundsink->mute = FALSE;
dsoundsink->pDS = NULL;
dsoundsink->cached_caps = NULL;
dsoundsink->pDSBSecondary = NULL;
switch (prop_id) {
case PROP_VOLUME:
- sink->volume = (int) (g_value_get_double (value) * 100);
- gst_directsound_sink_set_volume (sink);
+ gst_directsound_sink_set_volume (sink, g_value_get_double (value), TRUE);
+ break;
+ case PROP_MUTE:
+ gst_directsound_sink_set_mute (sink, g_value_get_boolean (value));
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
switch (prop_id) {
case PROP_VOLUME:
- g_value_set_double (value, (double) sink->volume / 100.);
+ g_value_set_double (value, gst_directsound_sink_get_volume (sink));
+ break;
+ case PROP_MUTE:
+ g_value_set_boolean (value, gst_directsound_sink_get_mute (sink));
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
}
static GstCaps *
-gst_directsound_sink_getcaps (GstBaseSink * bsink)
+gst_directsound_sink_getcaps (GstBaseSink * bsink, GstCaps * filter)
{
GstElementClass *element_class;
GstPadTemplate *pad_template;
static gboolean
gst_directsound_sink_open (GstAudioSink * asink)
{
- GstDirectSoundSink *dsoundsink = GST_DIRECTSOUND_SINK (asink);
+ GstDirectSoundSink *dsoundsink;
HRESULT hRes;
+ dsoundsink = GST_DIRECTSOUND_SINK (asink);
+
/* create and initialize a DirecSound object */
if (FAILED (hRes = DirectSoundCreate (NULL, &dsoundsink->pDS, NULL))) {
GST_ELEMENT_ERROR (dsoundsink, RESOURCE, OPEN_READ,
}
static gboolean
-gst_directsound_sink_prepare (GstAudioSink * asink, GstRingBufferSpec * spec)
+gst_directsound_sink_prepare (GstAudioSink * asink,
+ GstAudioRingBufferSpec * spec)
{
- GstDirectSoundSink *dsoundsink = GST_DIRECTSOUND_SINK (asink);
+ GstDirectSoundSink *dsoundsink;
HRESULT hRes;
DSBUFFERDESC descSecondary;
WAVEFORMATEX wfx;
+ dsoundsink = GST_DIRECTSOUND_SINK (asink);
+
/*save number of bytes per sample and buffer format */
- dsoundsink->bytes_per_sample = spec->bytes_per_sample;
- dsoundsink->buffer_format = spec->format;
+ dsoundsink->bytes_per_sample = spec->info.bpf;
+ dsoundsink->type = spec->type;
/* fill the WAVEFORMATEX structure with spec params */
memset (&wfx, 0, sizeof (wfx));
- if (spec->format != GST_IEC958) {
+ if (spec->type != GST_AUDIO_RING_BUFFER_FORMAT_TYPE_IEC958) {
wfx.cbSize = sizeof (wfx);
wfx.wFormatTag = WAVE_FORMAT_PCM;
- wfx.nChannels = spec->channels;
- wfx.nSamplesPerSec = spec->rate;
- wfx.wBitsPerSample = (spec->bytes_per_sample * 8) / wfx.nChannels;
- wfx.nBlockAlign = spec->bytes_per_sample;
+ wfx.nChannels = spec->info.channels;
+ wfx.nSamplesPerSec = spec->info.rate;
+ wfx.wBitsPerSample = (spec->info.bpf * 8) / wfx.nChannels;
+ wfx.nBlockAlign = spec->info.bpf;
wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
- /* Create directsound buffer with size based on our configured
+ /* Create directsound buffer with size based on our configured
* buffer_size (which is 200 ms by default) */
dsoundsink->buffer_size =
gst_util_uint64_scale_int (wfx.nAvgBytesPerSec, spec->buffer_time,
GST_MSECOND);
/* Make sure we make those numbers multiple of our sample size in bytes */
- dsoundsink->buffer_size += dsoundsink->buffer_size % spec->bytes_per_sample;
+ dsoundsink->buffer_size += dsoundsink->buffer_size % spec->info.bpf;
spec->segsize =
gst_util_uint64_scale_int (wfx.nAvgBytesPerSec, spec->latency_time,
GST_MSECOND);
- spec->segsize += spec->segsize % spec->bytes_per_sample;
+ spec->segsize += spec->segsize % spec->info.bpf;
spec->segtotal = dsoundsink->buffer_size / spec->segsize;
} else {
#ifdef WAVE_FORMAT_DOLBY_AC3_SPDIF
wfx.cbSize = 0;
wfx.wFormatTag = WAVE_FORMAT_DOLBY_AC3_SPDIF;
wfx.nChannels = 2;
- wfx.nSamplesPerSec = spec->rate;
+ wfx.nSamplesPerSec = spec->info.rate;
wfx.wBitsPerSample = 16;
wfx.nBlockAlign = wfx.wBitsPerSample / 8 * wfx.nChannels;
wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
GST_INFO_OBJECT (dsoundsink,
"GstRingBufferSpec->channels: %d, GstRingBufferSpec->rate: %d, GstRingBufferSpec->bytes_per_sample: %d\n"
"WAVEFORMATEX.nSamplesPerSec: %ld, WAVEFORMATEX.wBitsPerSample: %d, WAVEFORMATEX.nBlockAlign: %d, WAVEFORMATEX.nAvgBytesPerSec: %ld\n"
- "Size of dsound circular buffer=>%d\n", spec->channels, spec->rate,
- spec->bytes_per_sample, wfx.nSamplesPerSec, wfx.wBitsPerSample,
+ "Size of dsound circular buffer=>%d\n", spec->info.channels,
+ spec->info.rate, spec->info.bpf, wfx.nSamplesPerSec, wfx.wBitsPerSample,
wfx.nBlockAlign, wfx.nAvgBytesPerSec, dsoundsink->buffer_size);
/* create a secondary directsound buffer */
memset (&descSecondary, 0, sizeof (DSBUFFERDESC));
descSecondary.dwSize = sizeof (DSBUFFERDESC);
descSecondary.dwFlags = DSBCAPS_GETCURRENTPOSITION2 | DSBCAPS_GLOBALFOCUS;
- if (spec->format != GST_IEC958)
+ if (spec->type != GST_AUDIO_RING_BUFFER_FORMAT_TYPE_IEC958)
descSecondary.dwFlags |= DSBCAPS_CTRLVOLUME;
descSecondary.dwBufferBytes = dsoundsink->buffer_size;
return FALSE;
}
- gst_directsound_sink_set_volume (dsoundsink);
+ gst_directsound_sink_set_volume (dsoundsink, dsoundsink->volume, FALSE);
return TRUE;
}
return TRUE;
}
-static guint
+static gint
gst_directsound_sink_write (GstAudioSink * asink, gpointer data, guint length)
{
GstDirectSoundSink *dsoundsink;
dsoundsink = GST_DIRECTSOUND_SINK (asink);
/* Fix endianness */
- if (dsoundsink->buffer_format == GST_IEC958)
+ if (dsoundsink->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_IEC958)
_swab (data, data, length);
GST_DSOUND_LOCK (dsoundsink);
GST_DSOUND_UNLOCK (dsoundsink);
}
-/*
- * gst_directsound_probe_supported_formats:
- *
- * Takes the template caps and returns the subset which is actually
- * supported by this device.
- *
+/*
+ * gst_directsound_probe_supported_formats:
+ *
+ * Takes the template caps and returns the subset which is actually
+ * supported by this device.
+ *
*/
static GstCaps *
caps = gst_caps_copy (template_caps);
- /*
- * Check availability of digital output by trying to create an SPDIF buffer
+ /*
+ * Check availability of digital output by trying to create an SPDIF buffer
*/
#ifdef WAVE_FORMAT_DOLBY_AC3_SPDIF
wfx.nBlockAlign = 4;
wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
- // create a secondary directsound buffer
+ // create a secondary directsound buffer
memset (&descSecondary, 0, sizeof (DSBUFFERDESC));
descSecondary.dwSize = sizeof (DSBUFFERDESC);
descSecondary.dwFlags = DSBCAPS_GETCURRENTPOSITION2 | DSBCAPS_GLOBALFOCUS;
"(IDirectSound_CreateSoundBuffer returned: %s)\n",
DXGetErrorString9 (hRes));
caps =
- gst_caps_subtract (caps, gst_caps_new_simple ("audio/x-iec958", NULL));
+ gst_caps_subtract (caps, gst_caps_new_empty_simple ("audio/x-iec958"));
} else {
GST_INFO_OBJECT (dsoundsink, "AC3 passthrough supported");
hRes = IDirectSoundBuffer_Release (dsoundsink->pDSBSecondary);
return caps;
}
+
+static void
+gst_directsound_sink_set_volume (GstDirectSoundSink * dsoundsink,
+ gdouble dvolume, gboolean store)
+{
+ glong volume;
+
+ volume = dvolume * 100;
+ if (store)
+ dsoundsink->volume = volume;
+
+ if (dsoundsink->pDSBSecondary) {
+ /* DirectSound controls volume using units of 100th of a decibel,
+ * ranging from -10000 to 0. We use a linear scale of 0 - 100
+ * here, so remap.
+ */
+ long dsVolume;
+ if (dsoundsink->volume == 0)
+ dsVolume = -10000;
+ else
+ dsVolume = 100 * (long) (20 * log10 ((double) dsoundsink->volume / 100.));
+ dsVolume = CLAMP (dsVolume, -10000, 0);
+
+ GST_DEBUG_OBJECT (dsoundsink,
+ "Setting volume on secondary buffer to %d from %d", (int) dsVolume,
+ (int) dsoundsink->volume);
+ IDirectSoundBuffer_SetVolume (dsoundsink->pDSBSecondary, dsVolume);
+ }
+}
+
+gdouble
+gst_directsound_sink_get_volume (GstDirectSoundSink * dsoundsink)
+{
+ return (gdouble) dsoundsink->volume / 100;
+}
+
+static void
+gst_directsound_sink_set_mute (GstDirectSoundSink * dsoundsink, gboolean mute)
+{
+ if (mute)
+ gst_directsound_sink_set_volume (dsoundsink, 0, FALSE);
+ else
+ gst_directsound_sink_set_volume (dsoundsink, dsoundsink->volume, FALSE);
+}
+
+static gboolean
+gst_directsound_sink_get_mute (GstDirectSoundSink * dsoundsink)
+{
+ return FALSE;
+}
#include <gst/gst.h>
#include <gst/audio/gstaudiosink.h>
-#include <gst/interfaces/mixer.h>
#include <windows.h>
#include <dxerr9.h>
{
GstAudioSink sink;
+
/* directsound object interface pointer */
LPDIRECTSOUND pDS;
/* current volume setup by mixer interface */
glong volume;
-
- /* tracks list of our mixer interface implementation */
- GList *tracks;
+ gboolean mute;
GstCaps *cached_caps;
-
/* lock used to protect writes and resets */
GMutex *dsound_lock;
gboolean first_buffer_after_reset;
- GstBufferFormat buffer_format;
+ GstAudioRingBufferFormatType type;
};
struct _GstDirectSoundSinkClass
gst_oss_helper_get_format_structure (unsigned int format_bit)
{
GstStructure *structure;
- int endianness;
- gboolean sign;
- int width;
+ const gchar *format;
switch (format_bit) {
case AFMT_U8:
- endianness = 0;
- sign = FALSE;
- width = 8;
+ format = "U8";
break;
case AFMT_S16_LE:
- endianness = G_LITTLE_ENDIAN;
- sign = TRUE;
- width = 16;
+ format = "S16_LE";
break;
case AFMT_S16_BE:
- endianness = G_BIG_ENDIAN;
- sign = TRUE;
- width = 16;
+ format = "S16_BE";
break;
case AFMT_S8:
- endianness = 0;
- sign = TRUE;
- width = 8;
+ format = "S8";
break;
case AFMT_U16_LE:
- endianness = G_LITTLE_ENDIAN;
- sign = FALSE;
- width = 16;
+ format = "U16_LE";
break;
case AFMT_U16_BE:
- endianness = G_BIG_ENDIAN;
- sign = FALSE;
- width = 16;
+ format = "U16_BE";
break;
default:
g_assert_not_reached ();
return NULL;
}
- structure = gst_structure_new ("audio/x-raw-int",
- "width", G_TYPE_INT, width,
- "depth", G_TYPE_INT, width, "signed", G_TYPE_BOOLEAN, sign, NULL);
-
- if (endianness) {
- gst_structure_set (structure, "endianness", G_TYPE_INT, endianness, NULL);
- }
+ structure = gst_structure_new ("audio/x-raw",
+ "format", G_TYPE_STRING, format, NULL);
return structure;
}
} \
\
static void \
-interface_as_function ## _interface_init (GstMixerClass * klass) \
+interface_as_function ## _interface_init (GstMixerInterface * iface) \
{ \
- GST_MIXER_TYPE (klass) = GST_MIXER_HARDWARE; \
+ GST_MIXER_TYPE (iface) = GST_MIXER_HARDWARE; \
\
/* set up the interface hooks */ \
- klass->list_tracks = interface_as_function ## _list_tracks; \
- klass->set_volume = interface_as_function ## _set_volume; \
- klass->get_volume = interface_as_function ## _get_volume; \
- klass->set_mute = interface_as_function ## _set_mute; \
- klass->set_record = interface_as_function ## _set_record; \
+ iface->list_tracks = interface_as_function ## _list_tracks; \
+ iface->set_volume = interface_as_function ## _set_volume; \
+ iface->get_volume = interface_as_function ## _get_volume; \
+ iface->set_mute = interface_as_function ## _set_mute; \
+ iface->set_record = interface_as_function ## _set_record; \
}
PROP_DEVICE,
};
+#define FORMATS "{" GST_AUDIO_NE(S16)","GST_AUDIO_NE(U16)", S8, U8 }"
+
static GstStaticPadTemplate osssink_sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) { " G_STRINGIFY (G_BYTE_ORDER) " }, "
- "signed = (boolean) { TRUE, FALSE }, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]; "
- "audio/x-raw-int, "
- "signed = (boolean) { TRUE, FALSE }, "
- "width = (int) 8, "
- "depth = (int) 8, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMATS ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) 1; "
+ "audio/x-raw, "
+ "format = (string) " FORMATS ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) 2, " "channel-mask = (bitmask) 0x3")
);
static GstElementClass *parent_class = NULL;
"Erik Walthinsen <omega@cse.ogi.edu>, "
"Wim Taymans <wim.taymans@chello.be>");
- gst_element_class_add_static_pad_template (element_class,
- &osssink_sink_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&osssink_sink_factory));
}
static void
static guint gst_oss_src_delay (GstAudioSrc * asrc);
static void gst_oss_src_reset (GstAudioSrc * asrc);
-
+#define FORMATS "{" GST_AUDIO_NE(S16)","GST_AUDIO_NE(U16)", S8, U8 }"
static GstStaticPadTemplate osssrc_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) { " G_STRINGIFY (G_BYTE_ORDER) " }, "
- "signed = (boolean) { TRUE, FALSE }, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]; "
- "audio/x-raw-int, "
- "signed = (boolean) { TRUE, FALSE }, "
- "width = (int) 8, "
- "depth = (int) 8, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMATS ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) 1; "
+ "audio/x-raw, "
+ "format = (string) " FORMATS ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) 2, " "channel-mask = (bitmask) 0x3")
);
-
static void
gst_oss_src_dispose (GObject * object)
{
"Capture from a sound card via OSS",
"Erik Walthinsen <omega@cse.ogi.edu>, " "Wim Taymans <wim@fluendo.com>");
- gst_element_class_add_static_pad_template (element_class,
- &osssrc_src_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&osssrc_src_factory));
}
static void
}
ret = gst_oss4_mixer_set_control_val (s->mixer, s->mc, volume);
} else {
- ret = gst_oss4_mixer_set_control_val (s->mixer, s->mc->mute, ! !mute);
+ ret = gst_oss4_mixer_set_control_val (s->mixer, s->mc->mute, !!mute);
}
if (mute) {
if (s->mc->mute != NULL && s->mc->mute->changed) {
gst_mixer_mute_toggled (GST_MIXER (s->mixer), track,
- ! !s->mc->mute->last_val);
+ !!s->mc->mute->last_val);
} else {
/* nothing to do here, since we don't/can't easily implement the record
* flag */
mixer->watch_cond = g_cond_new ();
mixer->watch_shutdown = FALSE;
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- mixer->watch_thread = g_thread_create (gst_oss4_mixer_watch_thread,
- gst_object_ref (mixer), TRUE, &err);
-#else
mixer->watch_thread = g_thread_try_new ("oss4-mixer-thread",
gst_oss4_mixer_watch_thread, gst_object_ref (mixer), &err);
-#endif
if (mixer->watch_thread == NULL) {
GST_ERROR_OBJECT (mixer, "Could not create watch thread: %s", err->message);
}
static void
-gst_oss4_mixer_interface_init (GstMixerClass * klass)
+gst_oss4_mixer_interface_init (GstMixerInterface * iface)
{
- GST_MIXER_TYPE (klass) = GST_MIXER_HARDWARE;
-
- klass->list_tracks = gst_oss4_mixer_list_tracks;
- klass->set_volume = gst_oss4_mixer_set_volume;
- klass->get_volume = gst_oss4_mixer_get_volume;
- klass->set_mute = gst_oss4_mixer_set_mute;
- klass->set_record = gst_oss4_mixer_set_record;
- klass->set_option = gst_oss4_mixer_set_option;
- klass->get_option = gst_oss4_mixer_get_option;
- klass->get_mixer_flags = gst_oss4_mixer_get_mixer_flags;
+ GST_MIXER_TYPE (iface) = GST_MIXER_HARDWARE;
+
+ iface->list_tracks = gst_oss4_mixer_list_tracks;
+ iface->set_volume = gst_oss4_mixer_set_volume;
+ iface->get_volume = gst_oss4_mixer_get_volume;
+ iface->set_mute = gst_oss4_mixer_set_mute;
+ iface->set_record = gst_oss4_mixer_set_record;
+ iface->set_option = gst_oss4_mixer_set_option;
+ iface->get_option = gst_oss4_mixer_get_option;
+ iface->get_mixer_flags = gst_oss4_mixer_get_mixer_flags;
}
/* Implement the horror that is GstImplementsInterface */
templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_oss4_audio_get_template_caps ());
gst_element_class_add_pad_template (element_class, templ);
- gst_object_unref (templ);
}
static void
if (ioctl (oss->fd, SNDCTL_DSP_GET_PLAYTGT_NAMES, &routings) != -1) {
GST_LOG_OBJECT (oss, "%u output routings (static list: %d)",
- routings.nvalues, !!(routings.version == 0));
+ routings.nvalues, ! !(routings.version == 0));
for (i = 0; i < routings.nvalues; ++i) {
GST_LOG_OBJECT (oss, " output routing %d: %s", i,
&routings.strings[routings.strindex[i]]);
templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
gst_oss4_audio_get_template_caps ());
gst_element_class_add_pad_template (element_class, templ);
- gst_object_unref (templ);
}
static void
}
static void
-gst_oss4_source_mixer_interface_init (GstMixerClass * klass)
+gst_oss4_source_mixer_interface_init (GstMixerInterface * iface)
{
- GST_MIXER_TYPE (klass) = GST_MIXER_HARDWARE;
+ GST_MIXER_TYPE (iface) = GST_MIXER_HARDWARE;
- klass->list_tracks = gst_oss4_source_mixer_list_tracks;
- klass->set_volume = gst_oss4_source_mixer_set_volume;
- klass->get_volume = gst_oss4_source_mixer_get_volume;
- klass->set_mute = gst_oss4_source_mixer_set_mute;
- klass->set_record = gst_oss4_source_mixer_set_record;
+ iface->list_tracks = gst_oss4_source_mixer_list_tracks;
+ iface->set_volume = gst_oss4_source_mixer_set_volume;
+ iface->get_volume = gst_oss4_source_mixer_get_volume;
+ iface->set_mute = gst_oss4_source_mixer_set_mute;
+ iface->set_record = gst_oss4_source_mixer_set_record;
}
/* Implement the horror that is GstImplementsInterface */
#include "gstosxaudioelement.h"
static void
-gst_osx_audio_element_class_init (GstOsxAudioElementInterface * klass);
+gst_osx_audio_element_interface_init (GstOsxAudioElementInterface * iface);
GType
gst_osx_audio_element_get_type (void)
if (!gst_osxaudioelement_type) {
static const GTypeInfo gst_osxaudioelement_info = {
sizeof (GstOsxAudioElementInterface),
- (GBaseInitFunc) gst_osx_audio_element_class_init,
+ (GBaseInitFunc) gst_osx_audio_element_interface_init,
NULL,
NULL,
NULL,
}
static void
-gst_osx_audio_element_class_init (GstOsxAudioElementInterface * klass)
+gst_osx_audio_element_interface_init (GstOsxAudioElementInterface * iface)
{
static gboolean initialized = FALSE;
}
/* default virtual functions */
- klass->io_proc = NULL;
+ iface->io_proc = NULL;
}
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class, &sink_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
gst_element_class_set_details_simple (element_class, "Audio Sink (OSX)",
"Sink/Audio",
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class, &src_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
gst_element_class_set_details_simple (element_class, "Audio Source (OSX)",
"Source/Audio",
"Sink/Video", "OSX native videosink",
"Zaheer Abbas Merali <zaheerabbas at merali dot org>");
- gst_element_class_add_static_pad_template (element_class,
- &gst_osx_video_sink_sink_template_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_osx_video_sink_sink_template_factory));
}
static void
} \
\
static void \
-interface_as_function ## _interface_init (GstMixerClass * klass) \
+interface_as_function ## _interface_init (GstMixerInterface * iface) \
{ \
- GST_MIXER_TYPE (klass) = GST_MIXER_HARDWARE; \
+ GST_MIXER_TYPE (iface) = GST_MIXER_HARDWARE; \
\
/* set up the interface hooks */ \
- klass->list_tracks = interface_as_function ## _list_tracks; \
- klass->set_volume = interface_as_function ## _set_volume; \
- klass->get_volume = interface_as_function ## _get_volume; \
- klass->set_mute = interface_as_function ## _set_mute; \
- klass->set_record = interface_as_function ## _set_record; \
- klass->get_option = interface_as_function ## _get_option; \
- klass->set_option = interface_as_function ## _set_option; \
- klass->get_mixer_flags = interface_as_function ## _get_mixer_flags; \
+ iface->list_tracks = interface_as_function ## _list_tracks; \
+ iface->set_volume = interface_as_function ## _set_volume; \
+ iface->get_volume = interface_as_function ## _get_volume; \
+ iface->set_mute = interface_as_function ## _set_mute; \
+ iface->set_record = interface_as_function ## _set_record; \
+ iface->get_option = interface_as_function ## _get_option; \
+ iface->set_option = interface_as_function ## _set_option; \
+ iface->get_mixer_flags = interface_as_function ## _get_mixer_flags; \
}
G_END_DECLS
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class,
- &gst_sunaudiosink_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_sunaudiosink_factory));
gst_element_class_set_details_simple (element_class, "Sun Audio Sink",
"Sink/Audio",
"Audio sink for Sun Audio devices",
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_static_pad_template (element_class,
- &gst_sunaudiosrc_factory);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_sunaudiosrc_factory));
gst_element_class_set_details_simple (element_class, "Sun Audio Source",
"Source/Audio",
"Audio source for Sun Audio devices",
plugin_LTLIBRARIES = libgstvideo4linux2.la
if USE_XVIDEO
-xv_source = gstv4l2xoverlay.c
+xv_source = gstv4l2videooverlay.c
xv_libs = $(X_LIBS) $(XVIDEO_LIBS)
else
xv_source =
gstv4l2tuner.c \
gstv4l2vidorient.c \
v4l2_calls.c \
- v4l2src_calls.c \
$(xv_source)
-
-if BUILD_EXPERIMENTAL
libgstvideo4linux2_la_SOURCES += gstv4l2sink.c
-endif
libgstvideo4linux2_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) \
- $(GST_CONTROLLER_CFLAGS) \
$(GST_CFLAGS) \
$(X_CFLAGS) \
$(LIBV4L2_CFLAGS) \
libgstvideo4linux2_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
$(GST_BASE_LIBS) \
- $(GST_CONTROLLER_LIBS) \
$(GST_PLUGINS_BASE_LIBS) \
-lgstvideo-$(GST_MAJORMINOR) \
-lgstinterfaces-$(GST_MAJORMINOR) \
gstv4l2radio.h \
gstv4l2tuner.h \
gstv4l2vidorient.h \
- gstv4l2xoverlay.h \
- v4l2_calls.h \
- v4l2src_calls.h
+ gstv4l2videooverlay.h \
+ v4l2_calls.h
#include "gst/gst-i18n-plugin.h"
#include <gst/gst.h>
-#include <gst/controller/gstcontroller.h>
#include "gstv4l2object.h"
#include "gstv4l2src.h"
-#ifdef HAVE_EXPERIMENTAL
#include "gstv4l2sink.h"
-#endif
#include "gstv4l2radio.h"
/* #include "gstv4l2jpegsrc.h" */
/* #include "gstv4l2mjpegsrc.h" */
GST_DEBUG_CATEGORY_INIT (v4l2_debug, "v4l2", 0, "V4L2 API calls");
GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
- /* initialize gst controller library */
- gst_controller_init (NULL, NULL);
-
if (!gst_element_register (plugin, "v4l2src", GST_RANK_PRIMARY,
GST_TYPE_V4L2SRC) ||
-#ifdef HAVE_EXPERIMENTAL
!gst_element_register (plugin, "v4l2sink", GST_RANK_NONE,
GST_TYPE_V4L2SINK) ||
-#endif
!gst_element_register (plugin, "v4l2radio", GST_RANK_NONE,
GST_TYPE_V4L2RADIO) ||
/* !gst_element_register (plugin, "v4l2jpegsrc", */
#include <unistd.h>
#include "gst/video/video.h"
+#include "gst/video/gstvideometa.h"
+#include "gst/video/gstvideopool.h"
#include <gstv4l2bufferpool.h>
+
#include "gstv4l2src.h"
-#ifdef HAVE_EXPERIMENTAL
#include "gstv4l2sink.h"
-#endif
#include "v4l2_calls.h"
#include "gst/gst-i18n-plugin.h"
#include <gst/glib-compat-private.h>
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
-
/*
* GstV4l2Buffer:
*/
-
-static GstBufferClass *v4l2buffer_parent_class = NULL;
-
-static void
-gst_v4l2_buffer_finalize (GstV4l2Buffer * buffer)
+const GstMetaInfo *
+gst_v4l2_meta_get_info (void)
{
- GstV4l2BufferPool *pool;
- gboolean resuscitated = FALSE;
- gint index;
-
- pool = buffer->pool;
-
- index = buffer->vbuffer.index;
-
- GST_LOG_OBJECT (pool->v4l2elem, "finalizing buffer %p %d", buffer, index);
-
- GST_V4L2_BUFFER_POOL_LOCK (pool);
- if (pool->running) {
- if (pool->requeuebuf) {
- if (!gst_v4l2_buffer_pool_qbuf (pool, buffer)) {
- GST_WARNING ("could not requeue buffer %p %d", buffer, index);
- } else {
- resuscitated = TRUE;
- }
- } else {
- resuscitated = TRUE;
- /* XXX double check this... I think it is ok to not synchronize this
- * w.r.t. destruction of the pool, since the buffer is still live and
- * the buffer holds a ref to the pool..
- */
- g_async_queue_push (pool->avail_buffers, buffer);
- }
- } else {
- GST_LOG_OBJECT (pool->v4l2elem, "the pool is shutting down");
- }
-
- if (resuscitated) {
- /* FIXME: check that the caps didn't change */
- GST_LOG_OBJECT (pool->v4l2elem, "reviving buffer %p, %d", buffer, index);
- gst_buffer_ref (GST_BUFFER (buffer));
- GST_BUFFER_SIZE (buffer) = 0;
- pool->buffers[index] = buffer;
- }
-
- GST_V4L2_BUFFER_POOL_UNLOCK (pool);
-
- if (!resuscitated) {
- GST_LOG_OBJECT (pool->v4l2elem,
- "buffer %p (data %p, len %u) not recovered, unmapping",
- buffer, GST_BUFFER_DATA (buffer), buffer->vbuffer.length);
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
- v4l2_munmap ((void *) GST_BUFFER_DATA (buffer), buffer->vbuffer.length);
-
- GST_MINI_OBJECT_CLASS (v4l2buffer_parent_class)->finalize (GST_MINI_OBJECT
- (buffer));
+ static const GstMetaInfo *meta_info = NULL;
+
+ if (meta_info == NULL) {
+ meta_info =
+ gst_meta_register ("GstV4l2Meta", "GstV4l2Meta",
+ sizeof (GstV4l2Meta), (GstMetaInitFunction) NULL,
+ (GstMetaFreeFunction) NULL, (GstMetaCopyFunction) NULL,
+ (GstMetaTransformFunction) NULL);
}
+ return meta_info;
}
-static void
-gst_v4l2_buffer_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
- v4l2buffer_parent_class = g_type_class_peek_parent (g_class);
-
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_v4l2_buffer_finalize;
-}
+/*
+ * GstV4l2BufferPool:
+ */
+#define gst_v4l2_buffer_pool_parent_class parent_class
+G_DEFINE_TYPE (GstV4l2BufferPool, gst_v4l2_buffer_pool, GST_TYPE_BUFFER_POOL);
-GType
-gst_v4l2_buffer_get_type (void)
-{
- static GType _gst_v4l2_buffer_type;
-
- if (G_UNLIKELY (_gst_v4l2_buffer_type == 0)) {
- static const GTypeInfo v4l2_buffer_info = {
- sizeof (GstBufferClass),
- NULL,
- NULL,
- gst_v4l2_buffer_class_init,
- NULL,
- NULL,
- sizeof (GstV4l2Buffer),
- 0,
- NULL,
- NULL
- };
- _gst_v4l2_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
- "GstV4l2Buffer", &v4l2_buffer_info, 0);
- }
- return _gst_v4l2_buffer_type;
-}
+static void gst_v4l2_buffer_pool_release_buffer (GstBufferPool * bpool,
+ GstBuffer * buffer);
-static GstV4l2Buffer *
-gst_v4l2_buffer_new (GstV4l2BufferPool * pool, guint index, GstCaps * caps)
+static void
+gst_v4l2_buffer_pool_free_buffer (GstBufferPool * bpool, GstBuffer * buffer)
{
- GstV4l2Buffer *ret;
- guint8 *data;
-
- ret = (GstV4l2Buffer *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER);
-
- GST_LOG_OBJECT (pool->v4l2elem, "creating buffer %u, %p in pool %p", index,
- ret, pool);
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstV4l2Object *obj;
- ret->pool =
- (GstV4l2BufferPool *) gst_mini_object_ref (GST_MINI_OBJECT (pool));
+ obj = pool->obj;
- ret->vbuffer.index = index;
- ret->vbuffer.type = pool->type;
- ret->vbuffer.memory = V4L2_MEMORY_MMAP;
-
- if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &ret->vbuffer) < 0)
- goto querybuf_failed;
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ break;
+ case GST_V4L2_IO_MMAP:
+ {
+ GstV4l2Meta *meta;
+ gint index;
- GST_LOG_OBJECT (pool->v4l2elem, " index: %u", ret->vbuffer.index);
- GST_LOG_OBJECT (pool->v4l2elem, " type: %d", ret->vbuffer.type);
- GST_LOG_OBJECT (pool->v4l2elem, " bytesused: %u", ret->vbuffer.bytesused);
- GST_LOG_OBJECT (pool->v4l2elem, " flags: %08x", ret->vbuffer.flags);
- GST_LOG_OBJECT (pool->v4l2elem, " field: %d", ret->vbuffer.field);
- GST_LOG_OBJECT (pool->v4l2elem, " memory: %d", ret->vbuffer.memory);
- if (ret->vbuffer.memory == V4L2_MEMORY_MMAP)
- GST_LOG_OBJECT (pool->v4l2elem, " MMAP offset: %u",
- ret->vbuffer.m.offset);
- GST_LOG_OBJECT (pool->v4l2elem, " length: %u", ret->vbuffer.length);
- GST_LOG_OBJECT (pool->v4l2elem, " input: %u", ret->vbuffer.input);
+ meta = GST_V4L2_META_GET (buffer);
+ g_assert (meta != NULL);
- data = (guint8 *) v4l2_mmap (0, ret->vbuffer.length,
- PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
- ret->vbuffer.m.offset);
+ index = meta->vbuffer.index;
+ GST_LOG_OBJECT (pool,
+ "mmap buffer %p idx %d (data %p, len %u) freed, unmapping", buffer,
+ index, meta->mem, meta->vbuffer.length);
- if (data == MAP_FAILED)
- goto mmap_failed;
+ v4l2_munmap (meta->mem, meta->vbuffer.length);
+ pool->buffers[index] = NULL;
+ break;
+ }
+ case GST_V4L2_IO_USERPTR:
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ gst_buffer_unref (buffer);
+}
- GST_BUFFER_DATA (ret) = data;
- GST_BUFFER_SIZE (ret) = ret->vbuffer.length;
+static GstFlowReturn
+gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
+ GstBufferPoolParams * params)
+{
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstBuffer *newbuf;
+ GstV4l2Meta *meta;
+ GstV4l2Object *obj;
+ GstVideoInfo *info;
+ guint index;
+
+ obj = pool->obj;
+ info = &obj->info;
+
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ {
+ newbuf =
+ gst_buffer_new_allocate (pool->allocator, pool->size, pool->align);
+ break;
+ }
+ case GST_V4L2_IO_MMAP:
+ {
+ newbuf = gst_buffer_new ();
+ meta = GST_V4L2_META_ADD (newbuf);
+
+ index = pool->num_allocated;
+
+ GST_LOG_OBJECT (pool, "creating buffer %u, %p", index, newbuf);
+
+ meta->vbuffer.index = index;
+ meta->vbuffer.type = obj->type;
+ meta->vbuffer.memory = V4L2_MEMORY_MMAP;
+
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
+ goto querybuf_failed;
+
+ GST_LOG_OBJECT (pool, " index: %u", meta->vbuffer.index);
+ GST_LOG_OBJECT (pool, " type: %d", meta->vbuffer.type);
+ GST_LOG_OBJECT (pool, " bytesused: %u", meta->vbuffer.bytesused);
+ GST_LOG_OBJECT (pool, " flags: %08x", meta->vbuffer.flags);
+ GST_LOG_OBJECT (pool, " field: %d", meta->vbuffer.field);
+ GST_LOG_OBJECT (pool, " memory: %d", meta->vbuffer.memory);
+ if (meta->vbuffer.memory == V4L2_MEMORY_MMAP)
+ GST_LOG_OBJECT (pool, " MMAP offset: %u", meta->vbuffer.m.offset);
+ GST_LOG_OBJECT (pool, " length: %u", meta->vbuffer.length);
+ GST_LOG_OBJECT (pool, " input: %u", meta->vbuffer.input);
+
+ meta->mem = v4l2_mmap (0, meta->vbuffer.length,
+ PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
+ meta->vbuffer.m.offset);
+ if (meta->mem == MAP_FAILED)
+ goto mmap_failed;
+
+ gst_buffer_take_memory (newbuf, -1,
+ gst_memory_new_wrapped (0,
+ meta->mem, NULL, meta->vbuffer.length, 0, meta->vbuffer.length));
+
+ /* add metadata to raw video buffers */
+ if (pool->add_videometa && info->finfo) {
+ gsize offset[GST_VIDEO_MAX_PLANES];
+ gint stride[GST_VIDEO_MAX_PLANES];
+
+ offset[0] = 0;
+ stride[0] = obj->bytesperline;
+
+ GST_DEBUG_OBJECT (pool, "adding video meta, stride %d", stride[0]);
+ gst_buffer_add_video_meta_full (newbuf, info->flags,
+ GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_WIDTH (info),
+ GST_VIDEO_INFO_HEIGHT (info), GST_VIDEO_INFO_N_PLANES (info),
+ offset, stride);
+ }
+ break;
+ }
+ case GST_V4L2_IO_USERPTR:
+ default:
+ g_assert_not_reached ();
+ break;
+ }
- GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_READONLY);
+ pool->num_allocated++;
- gst_buffer_set_caps (GST_BUFFER (ret), caps);
+ *buffer = newbuf;
- return ret;
+ return GST_FLOW_OK;
/* ERRORS */
querybuf_failed:
gint errnosave = errno;
GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave));
- gst_buffer_unref (GST_BUFFER (ret));
+ gst_buffer_unref (newbuf);
errno = errnosave;
- return NULL;
+ return GST_FLOW_ERROR;
}
mmap_failed:
{
gint errnosave = errno;
GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave));
- gst_buffer_unref (GST_BUFFER (ret));
+ gst_buffer_unref (newbuf);
errno = errnosave;
- return NULL;
+ return GST_FLOW_ERROR;
}
}
+static gboolean
+gst_v4l2_buffer_pool_set_config (GstBufferPool * bpool, GstStructure * config)
+{
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstV4l2Object *obj = pool->obj;
+ const GstCaps *caps;
+ guint size, min_buffers, max_buffers;
+ guint prefix, align;
-/*
- * GstV4l2BufferPool:
- */
+ GST_DEBUG_OBJECT (pool, "set config");
-static GstMiniObjectClass *buffer_pool_parent_class = NULL;
+ pool->add_videometa =
+ gst_buffer_pool_config_has_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_META);
-static void
-gst_v4l2_buffer_pool_finalize (GstV4l2BufferPool * pool)
-{
- g_mutex_free (pool->lock);
- pool->lock = NULL;
+ if (!pool->add_videometa) {
+ gint stride;
- g_async_queue_unref (pool->avail_buffers);
- pool->avail_buffers = NULL;
+ /* we don't have video metadata, see if the strides are compatible */
+ stride = GST_VIDEO_INFO_PLANE_STRIDE (&obj->info, 0);
- if (pool->video_fd >= 0)
- v4l2_close (pool->video_fd);
+ GST_DEBUG_OBJECT (pool, "no videometadata, checking strides %d and %u",
+ stride, obj->bytesperline);
- if (pool->buffers) {
- g_free (pool->buffers);
- pool->buffers = NULL;
+ if (stride != obj->bytesperline)
+ goto missing_video_api;
}
- GST_MINI_OBJECT_CLASS (buffer_pool_parent_class)->finalize (GST_MINI_OBJECT
- (pool));
-}
+ /* parse the config and keep around */
+ if (!gst_buffer_pool_config_get (config, &caps, &size, &min_buffers,
+ &max_buffers, &prefix, &align))
+ goto wrong_config;
-static void
-gst_v4l2_buffer_pool_init (GstV4l2BufferPool * pool, gpointer g_class)
-{
- pool->lock = g_mutex_new ();
- pool->running = FALSE;
- pool->num_live_buffers = 0;
-}
+ GST_DEBUG_OBJECT (pool, "config %" GST_PTR_FORMAT, config);
-static void
-gst_v4l2_buffer_pool_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
+ pool->size = size;
+ pool->max_buffers = MAX (min_buffers, max_buffers);
+ pool->min_buffers = MIN (pool->max_buffers, min_buffers);
+ pool->prefix = prefix;
+ pool->align = align;
- buffer_pool_parent_class = g_type_class_peek_parent (g_class);
+ gst_buffer_pool_config_set (config, caps, size, min_buffers,
+ max_buffers, prefix, align);
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_v4l2_buffer_pool_finalize;
-}
+ return GST_BUFFER_POOL_CLASS (parent_class)->set_config (bpool, config);
-GType
-gst_v4l2_buffer_pool_get_type (void)
-{
- static GType _gst_v4l2_buffer_pool_type;
-
- if (G_UNLIKELY (_gst_v4l2_buffer_pool_type == 0)) {
- static const GTypeInfo v4l2_buffer_pool_info = {
- sizeof (GstMiniObjectClass),
- NULL,
- NULL,
- gst_v4l2_buffer_pool_class_init,
- NULL,
- NULL,
- sizeof (GstV4l2BufferPool),
- 0,
- (GInstanceInitFunc) gst_v4l2_buffer_pool_init,
- NULL
- };
- _gst_v4l2_buffer_pool_type = g_type_register_static (GST_TYPE_MINI_OBJECT,
- "GstV4l2BufferPool", &v4l2_buffer_pool_info, 0);
- }
- return _gst_v4l2_buffer_pool_type;
+ /* ERRORS */
+missing_video_api:
+ {
+ GST_ERROR_OBJECT (pool, "missing GstMetaVideo API in config, "
+ "default stride: %d, wanted stride %u",
+ GST_VIDEO_INFO_PLANE_STRIDE (&obj->info, 0), obj->bytesperline);
+ return FALSE;
+ }
+wrong_config:
+ {
+ GST_ERROR_OBJECT (pool, "invalid config %" GST_PTR_FORMAT, config);
+ return FALSE;
+ }
}
-
-/* this is somewhat of a hack.. but better to keep the hack in
- * one place than copy/pasting it around..
- */
-static GstV4l2Object *
-get_v4l2_object (GstElement * v4l2elem)
+static gboolean
+start_streaming (GstV4l2BufferPool * pool)
{
- GstV4l2Object *v4l2object = NULL;
- if (GST_IS_V4L2SRC (v4l2elem)) {
- v4l2object = (GST_V4L2SRC (v4l2elem))->v4l2object;
-#ifdef HAVE_EXPERIMENTAL
- } else if (GST_IS_V4L2SINK (v4l2elem)) {
- v4l2object = (GST_V4L2SINK (v4l2elem))->v4l2object;
-#endif
- } else {
- GST_ERROR_OBJECT (v4l2elem, "unknown v4l2 element");
+ GstV4l2Object *obj = pool->obj;
+
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ break;
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_USERPTR:
+ GST_DEBUG_OBJECT (pool, "STREAMON");
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_STREAMON, &obj->type) < 0)
+ goto start_failed;
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
}
- return v4l2object;
-}
+ pool->streaming = TRUE;
+ return TRUE;
-/**
- * gst_v4l2_buffer_pool_new:
- * @v4l2elem: the v4l2 element (src or sink) that owns this pool
- * @fd: the video device file descriptor
- * @num_buffers: the requested number of buffers in the pool
- * @caps: the caps to set on the buffer
- * @requeuebuf: if %TRUE, and if the pool is still in the running state, a
- * buffer with no remaining references is immediately passed back to v4l2
- * (VIDIOC_QBUF), otherwise it is returned to the pool of available buffers
- * (which can be accessed via gst_v4l2_buffer_pool_get().
- *
- * Construct a new buffer pool.
- *
- * Returns: the new pool, use gst_v4l2_buffer_pool_destroy() to free resources
- */
-GstV4l2BufferPool *
-gst_v4l2_buffer_pool_new (GstElement * v4l2elem, gint fd, gint num_buffers,
- GstCaps * caps, gboolean requeuebuf, enum v4l2_buf_type type)
+ /* ERRORS */
+start_failed:
+ {
+ GST_ERROR_OBJECT (pool, "error with STREAMON %d (%s)", errno,
+ g_strerror (errno));
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_v4l2_buffer_pool_start (GstBufferPool * bpool)
{
- GstV4l2BufferPool *pool;
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstV4l2Object *obj = pool->obj;
gint n;
struct v4l2_requestbuffers breq;
+ gint min_buffers, max_buffers;
- pool = (GstV4l2BufferPool *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER_POOL);
-
- pool->video_fd = v4l2_dup (fd);
- if (pool->video_fd < 0)
- goto dup_failed;
+ min_buffers = pool->min_buffers;
+ max_buffers = pool->max_buffers;
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ {
+ break;
+ }
+ case GST_V4L2_IO_MMAP:
+ {
+ /* first, lets request buffers, and see how many we can get: */
+ GST_DEBUG_OBJECT (pool, "starting, requesting %d MMAP buffers",
+ max_buffers);
- /* first, lets request buffers, and see how many we can get: */
- GST_DEBUG_OBJECT (v4l2elem, "STREAMING, requesting %d MMAP buffers",
- num_buffers);
+ if (max_buffers == 0)
+ max_buffers = 4;
- memset (&breq, 0, sizeof (struct v4l2_requestbuffers));
- breq.type = type;
- breq.count = num_buffers;
- breq.memory = V4L2_MEMORY_MMAP;
+ memset (&breq, 0, sizeof (struct v4l2_requestbuffers));
+ breq.type = obj->type;
+ breq.count = max_buffers;
+ breq.memory = V4L2_MEMORY_MMAP;
- if (v4l2_ioctl (fd, VIDIOC_REQBUFS, &breq) < 0)
- goto reqbufs_failed;
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_REQBUFS, &breq) < 0)
+ goto reqbufs_failed;
- GST_LOG_OBJECT (v4l2elem, " count: %u", breq.count);
- GST_LOG_OBJECT (v4l2elem, " type: %d", breq.type);
- GST_LOG_OBJECT (v4l2elem, " memory: %d", breq.memory);
+ GST_LOG_OBJECT (pool, " count: %u", breq.count);
+ GST_LOG_OBJECT (pool, " type: %d", breq.type);
+ GST_LOG_OBJECT (pool, " memory: %d", breq.memory);
- if (breq.count < GST_V4L2_MIN_BUFFERS)
- goto no_buffers;
+ if (breq.count < GST_V4L2_MIN_BUFFERS)
+ goto no_buffers;
- if (num_buffers != breq.count) {
- GST_WARNING_OBJECT (v4l2elem, "using %u buffers instead", breq.count);
- num_buffers = breq.count;
+ if (max_buffers != breq.count) {
+ GST_WARNING_OBJECT (pool, "using %u buffers instead", breq.count);
+ max_buffers = breq.count;
+ }
+ break;
+ }
+ case GST_V4L2_IO_USERPTR:
+ default:
+ g_assert_not_reached ();
+ break;
}
- pool->v4l2elem = v4l2elem;
- pool->requeuebuf = requeuebuf;
- pool->type = type;
- pool->buffer_count = num_buffers;
- pool->buffers = g_new0 (GstV4l2Buffer *, num_buffers);
- pool->avail_buffers = g_async_queue_new ();
+ pool->obj = obj;
+ pool->max_buffers = max_buffers;
+ pool->buffers = g_new0 (GstBuffer *, max_buffers);
+ pool->num_allocated = 0;
- /* now, map the buffers: */
- for (n = 0; n < num_buffers; n++) {
- pool->buffers[n] = gst_v4l2_buffer_new (pool, n, caps);
- if (!pool->buffers[n])
+ /* now, allocate the buffers: */
+ for (n = 0; n < min_buffers; n++) {
+ GstBuffer *buffer;
+
+ if (gst_v4l2_buffer_pool_alloc_buffer (bpool, &buffer, NULL) != GST_FLOW_OK)
goto buffer_new_failed;
- pool->num_live_buffers++;
- g_async_queue_push (pool->avail_buffers, pool->buffers[n]);
- }
- return pool;
+ gst_v4l2_buffer_pool_release_buffer (bpool, buffer);
+ }
- /* ERRORS */
-dup_failed:
- {
- gint errnosave = errno;
+ /* we can start capturing now, we wait for the playback case until we queued
+ * the first buffer */
+ if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
+ if (!start_streaming (pool))
+ goto start_failed;
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
+ gst_poll_set_flushing (obj->poll, FALSE);
- errno = errnosave;
+ return TRUE;
- return NULL;
- }
+ /* ERRORS */
reqbufs_failed:
{
- GstV4l2Object *v4l2object = get_v4l2_object (v4l2elem);
- GST_ELEMENT_ERROR (v4l2elem, RESOURCE, READ,
- (_("Could not get buffers from device '%s'."),
- v4l2object->videodev),
- ("error requesting %d buffers: %s", num_buffers, g_strerror (errno)));
- return NULL;
+ GST_ERROR_OBJECT (pool,
+ "error requesting %d buffers: %s", max_buffers, g_strerror (errno));
+ return FALSE;
}
no_buffers:
{
- GstV4l2Object *v4l2object = get_v4l2_object (v4l2elem);
- GST_ELEMENT_ERROR (v4l2elem, RESOURCE, READ,
- (_("Could not get enough buffers from device '%s'."),
- v4l2object->videodev),
- ("we received %d from device '%s', we want at least %d",
- breq.count, v4l2object->videodev, GST_V4L2_MIN_BUFFERS));
- return NULL;
+ GST_ERROR_OBJECT (pool,
+ "we received %d from device '%s', we want at least %d",
+ breq.count, obj->videodev, GST_V4L2_MIN_BUFFERS);
+ return FALSE;
}
buffer_new_failed:
{
- gint errnosave = errno;
+ GST_ERROR_OBJECT (pool, "failed to create a buffer");
+ return FALSE;
+ }
+start_failed:
+ {
+ GST_ERROR_OBJECT (pool, "failed to start streaming");
+ return FALSE;
+ }
+}
- gst_v4l2_buffer_pool_destroy (pool);
+static gboolean
+gst_v4l2_buffer_pool_stop (GstBufferPool * bpool)
+{
+ gboolean ret;
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstV4l2Object *obj = pool->obj;
+ guint n;
+
+ GST_DEBUG_OBJECT (pool, "stopping pool");
+
+ gst_poll_set_flushing (obj->poll, TRUE);
+
+ if (pool->streaming) {
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ break;
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_USERPTR:
+ /* we actually need to sync on all queued buffers but not
+ * on the non-queued ones */
+ GST_DEBUG_OBJECT (pool, "STREAMOFF");
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_STREAMOFF, &obj->type) < 0)
+ goto stop_failed;
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ pool->streaming = FALSE;
+ }
- errno = errnosave;
+ /* first free the buffers in the queue */
+ ret = GST_BUFFER_POOL_CLASS (parent_class)->stop (bpool);
- return NULL;
+ /* then free the remaining buffers */
+ for (n = 0; n < pool->num_allocated; n++) {
+ if (pool->buffers[n])
+ gst_v4l2_buffer_pool_free_buffer (bpool, pool->buffers[n]);
+ }
+ g_free (pool->buffers);
+ pool->buffers = NULL;
+
+ return ret;
+
+ /* ERRORS */
+stop_failed:
+ {
+ GST_ERROR_OBJECT (pool, "error with STREAMOFF %d (%s)", errno,
+ g_strerror (errno));
+ return FALSE;
}
}
-/**
- * gst_v4l2_buffer_pool_destroy:
- * @pool: the pool
- *
- * Free all resources in the pool and the pool itself.
- */
-void
-gst_v4l2_buffer_pool_destroy (GstV4l2BufferPool * pool)
+static GstFlowReturn
+gst_v4l2_object_poll (GstV4l2Object * v4l2object)
{
- gint n;
+ gint ret;
+
+ if (v4l2object->can_poll_device) {
+ GST_LOG_OBJECT (v4l2object->element, "polling device");
+ ret = gst_poll_wait (v4l2object->poll, GST_CLOCK_TIME_NONE);
+ if (G_UNLIKELY (ret < 0)) {
+ if (errno == EBUSY)
+ goto stopped;
+ if (errno == ENXIO) {
+ GST_WARNING_OBJECT (v4l2object->element,
+ "v4l2 device doesn't support polling. Disabling");
+ v4l2object->can_poll_device = FALSE;
+ } else {
+ if (errno != EAGAIN && errno != EINTR)
+ goto select_error;
+ }
+ }
+ }
+ return GST_FLOW_OK;
- GST_V4L2_BUFFER_POOL_LOCK (pool);
- pool->running = FALSE;
- GST_V4L2_BUFFER_POOL_UNLOCK (pool);
+ /* ERRORS */
+stopped:
+ {
+ GST_DEBUG ("stop called");
+ return GST_FLOW_FLUSHING;
+ }
+select_error:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ, (NULL),
+ ("poll error %d: %s (%d)", ret, g_strerror (errno), errno));
+ return GST_FLOW_ERROR;
+ }
+}
- GST_DEBUG_OBJECT (pool->v4l2elem, "destroy pool");
+static GstFlowReturn
+gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstBuffer * buf)
+{
+ GstV4l2Meta *meta;
+ gint index;
- /* after this point, no more buffers will be queued or dequeued; no buffer
- * from pool->buffers that is NULL will be set to a buffer, and no buffer that
- * is not NULL will be pushed out. */
+ meta = GST_V4L2_META_GET (buf);
+ g_assert (meta != NULL);
- /* miniobjects have no dispose, so they can't break ref-cycles, as buffers ref
- * the pool, we need to unref the buffer to properly finalize te pool */
- for (n = 0; n < pool->buffer_count; n++) {
- GstBuffer *buf;
+ index = meta->vbuffer.index;
- GST_V4L2_BUFFER_POOL_LOCK (pool);
- buf = GST_BUFFER (pool->buffers[n]);
- GST_V4L2_BUFFER_POOL_UNLOCK (pool);
+ GST_LOG_OBJECT (pool, "enqueue buffer %p, index:%d, queued:%d", buf,
+ index, pool->num_queued);
- if (buf)
- /* we own the ref if the buffer is in pool->buffers; drop it. */
- gst_buffer_unref (buf);
- }
+ if (pool->buffers[index] != NULL)
+ goto already_queued;
+
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_QBUF, &meta->vbuffer) < 0)
+ goto queue_failed;
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
+ pool->buffers[index] = buf;
+ pool->num_queued++;
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+already_queued:
+ {
+ GST_WARNING_OBJECT (pool, "the buffer was already queued");
+ return GST_FLOW_ERROR;
+ }
+queue_failed:
+ {
+ GST_WARNING_OBJECT (pool, "could not queue a buffer %d (%s)", errno,
+ g_strerror (errno));
+ return GST_FLOW_ERROR;
+ }
}
-/**
- * gst_v4l2_buffer_pool_get:
- * @pool: the "this" object
- * @blocking: should this call suspend until there is a buffer available
- * in the buffer pool?
- *
- * Get an available buffer in the pool
- */
-GstV4l2Buffer *
-gst_v4l2_buffer_pool_get (GstV4l2BufferPool * pool, gboolean blocking)
+static GstFlowReturn
+gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
{
- GstV4l2Buffer *buf;
+ GstFlowReturn res;
+ GstBuffer *outbuf;
+ struct v4l2_buffer vbuffer;
+ GstV4l2Object *obj = pool->obj;
+
+ if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
+ /* select works for input devices when data is available. According to the
+ * specs we can also poll to find out when a frame has been displayed but
+ * that just seems to lock up here */
+ if ((res = gst_v4l2_object_poll (obj)) != GST_FLOW_OK)
+ goto poll_error;
+ }
- if (blocking) {
- buf = g_async_queue_pop (pool->avail_buffers);
- } else {
- buf = g_async_queue_try_pop (pool->avail_buffers);
+ memset (&vbuffer, 0x00, sizeof (vbuffer));
+ vbuffer.type = obj->type;
+ vbuffer.memory = V4L2_MEMORY_MMAP;
+
+ GST_LOG_OBJECT (pool, "doing DQBUF");
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_DQBUF, &vbuffer) < 0)
+ goto error;
+
+ /* get our GstBuffer with that index from the pool, if the buffer was
+ * outstanding we have a serious problem.
+ */
+ outbuf = pool->buffers[vbuffer.index];
+ if (outbuf == NULL)
+ goto no_buffer;
+
+ /* mark the buffer outstanding */
+ pool->buffers[vbuffer.index] = NULL;
+ pool->num_queued--;
+
+ GST_LOG_OBJECT (pool,
+ "dequeued buffer %p seq:%d (ix=%d), used %d, flags %08x, pool-queued=%d, buffer=%p",
+ outbuf, vbuffer.sequence, vbuffer.index, vbuffer.bytesused, vbuffer.flags,
+ pool->num_queued, outbuf);
+
+ /* set top/bottom field first if v4l2_buffer has the information */
+ if (vbuffer.field == V4L2_FIELD_INTERLACED_TB) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ }
+ if (vbuffer.field == V4L2_FIELD_INTERLACED_BT) {
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
}
- if (buf) {
- GST_V4L2_BUFFER_POOL_LOCK (pool);
- GST_BUFFER_SIZE (buf) = buf->vbuffer.length;
- GST_BUFFER_FLAG_UNSET (buf, 0xffffffff);
- GST_V4L2_BUFFER_POOL_UNLOCK (pool);
+ /* this can change at every frame, esp. with jpeg */
+ if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
+ gst_buffer_resize (outbuf, 0, vbuffer.bytesused);
+ else
+ gst_buffer_resize (outbuf, 0, vbuffer.length);
+
+ *buffer = outbuf;
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+poll_error:
+ {
+ GST_DEBUG_OBJECT (pool, "poll error %s", gst_flow_get_name (res));
+ return res;
+ }
+error:
+ {
+ GST_WARNING_OBJECT (pool,
+ "problem dequeuing frame %d (ix=%d), pool-ct=%d, buf.flags=%d",
+ vbuffer.sequence, vbuffer.index,
+ GST_MINI_OBJECT_REFCOUNT (pool), vbuffer.flags);
+
+ switch (errno) {
+ case EAGAIN:
+ GST_WARNING_OBJECT (pool,
+ "Non-blocking I/O has been selected using O_NONBLOCK and"
+ " no buffer was in the outgoing queue. device %s", obj->videodev);
+ break;
+ case EINVAL:
+ GST_ERROR_OBJECT (pool,
+ "The buffer type is not supported, or the index is out of bounds, "
+ "or no buffers have been allocated yet, or the userptr "
+ "or length are invalid. device %s", obj->videodev);
+ break;
+ case ENOMEM:
+ GST_ERROR_OBJECT (pool,
+ "insufficient memory to enqueue a user pointer buffer");
+ break;
+ case EIO:
+ GST_INFO_OBJECT (pool,
+ "VIDIOC_DQBUF failed due to an internal error."
+ " Can also indicate temporary problems like signal loss."
+ " Note the driver might dequeue an (empty) buffer despite"
+ " returning an error, or even stop capturing."
+ " device %s", obj->videodev);
+ /* have we de-queued a buffer ? */
+ if (!(vbuffer.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))) {
+ GST_DEBUG_OBJECT (pool, "reenqueing buffer");
+ /* FIXME ... should we do something here? */
+ }
+ break;
+ case EINTR:
+ GST_WARNING_OBJECT (pool,
+ "could not sync on a buffer on device %s", obj->videodev);
+ break;
+ default:
+ GST_WARNING_OBJECT (pool,
+ "Grabbing frame got interrupted on %s unexpectedly. %d: %s.",
+ obj->videodev, errno, g_strerror (errno));
+ break;
+ }
+ return GST_FLOW_ERROR;
+ }
+no_buffer:
+ {
+ GST_ERROR_OBJECT (pool, "No free buffer found in the pool at index %d.",
+ vbuffer.index);
+ return GST_FLOW_ERROR;
}
+}
- pool->running = TRUE;
+static GstFlowReturn
+gst_v4l2_buffer_pool_acquire_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
+ GstBufferPoolParams * params)
+{
+ GstFlowReturn ret;
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstV4l2Object *obj = pool->obj;
+
+ GST_DEBUG_OBJECT (pool, "acquire");
+
+ if (GST_BUFFER_POOL_IS_FLUSHING (bpool))
+ goto flushing;
+
+ switch (obj->type) {
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE:
+ /* capture, This function should return a buffer with new captured data */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* take empty buffer from the pool */
+ ret = GST_BUFFER_POOL_CLASS (parent_class)->acquire_buffer (bpool,
+ buffer, params);
+ break;
+
+ case GST_V4L2_IO_MMAP:
+ /* just dequeue a buffer, we basically use the queue of v4l2 as the
+ * storage for our buffers. This function does poll first so we can
+ * interrupt it fine. */
+ ret = gst_v4l2_buffer_pool_dqbuf (pool, buffer);
+ break;
+
+ case GST_V4L2_IO_USERPTR:
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ break;
- return buf;
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT:
+ /* playback, This function should return an empty buffer */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* get an empty buffer */
+ ret = GST_BUFFER_POOL_CLASS (parent_class)->acquire_buffer (bpool,
+ buffer, params);
+ break;
+
+ case GST_V4L2_IO_MMAP:
+ /* get a free unqueued buffer */
+ ret = GST_BUFFER_POOL_CLASS (parent_class)->acquire_buffer (bpool,
+ buffer, params);
+ break;
+
+ case GST_V4L2_IO_USERPTR:
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ break;
+
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ return ret;
+
+ /* ERRORS */
+flushing:
+ {
+ GST_DEBUG_OBJECT (pool, "We are flushing");
+ return GST_FLOW_FLUSHING;
+ }
}
+static void
+gst_v4l2_buffer_pool_release_buffer (GstBufferPool * bpool, GstBuffer * buffer)
+{
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstV4l2Object *obj = pool->obj;
+
+ GST_DEBUG_OBJECT (pool, "release buffer %p", buffer);
+
+ switch (obj->type) {
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE:
+ /* capture, put the buffer back in the queue so that we can refill it
+ * later. */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* release back in the pool */
+ GST_BUFFER_POOL_CLASS (parent_class)->release_buffer (bpool, buffer);
+ break;
+
+ case GST_V4L2_IO_MMAP:
+ /* queue back in the device */
+ gst_v4l2_buffer_pool_qbuf (pool, buffer);
+ break;
+
+ case GST_V4L2_IO_USERPTR:
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ break;
-/**
- * gst_v4l2_buffer_pool_qbuf:
- * @pool: the pool
- * @buf: the buffer to queue
- *
- * Queue a buffer to the driver
- *
- * Returns: %TRUE for success
- */
-gboolean
-gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstV4l2Buffer * buf)
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT:
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* release back in the pool */
+ GST_BUFFER_POOL_CLASS (parent_class)->release_buffer (bpool, buffer);
+ break;
+
+ case GST_V4L2_IO_MMAP:
+ {
+ GstV4l2Meta *meta;
+
+ meta = GST_V4L2_META_GET (buffer);
+ g_assert (meta != NULL);
+
+ if (pool->buffers[meta->vbuffer.index] == NULL) {
+ GST_LOG_OBJECT (pool, "buffer not queued, putting on free list");
+ /* playback, put the buffer back in the queue to refill later. */
+ GST_BUFFER_POOL_CLASS (parent_class)->release_buffer (bpool,
+ buffer);
+ } else {
+ /* the buffer is queued in the device but maybe not played yet. We just
+ * leave it there and not make it available for future calls to acquire
+ * for now. The buffer will be dequeued and reused later. */
+ GST_LOG_OBJECT (pool, "buffer is queued");
+ }
+ break;
+ }
+
+ case GST_V4L2_IO_USERPTR:
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ break;
+
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+}
+
+static void
+gst_v4l2_buffer_pool_finalize (GObject * object)
{
- GST_LOG_OBJECT (pool->v4l2elem, "enqueue pool buffer %d", buf->vbuffer.index);
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (object);
- if (v4l2_ioctl (pool->video_fd, VIDIOC_QBUF, &buf->vbuffer) < 0)
- return FALSE;
+ if (pool->video_fd >= 0)
+ v4l2_close (pool->video_fd);
- pool->num_live_buffers--;
- GST_DEBUG_OBJECT (pool->v4l2elem, "num_live_buffers--: %d",
- pool->num_live_buffers);
+ g_free (pool->buffers);
- return TRUE;
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_v4l2_buffer_pool_init (GstV4l2BufferPool * pool)
+{
+}
+
+static void
+gst_v4l2_buffer_pool_class_init (GstV4l2BufferPoolClass * klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+ GstBufferPoolClass *bufferpool_class = GST_BUFFER_POOL_CLASS (klass);
+
+ object_class->finalize = gst_v4l2_buffer_pool_finalize;
+
+ bufferpool_class->start = gst_v4l2_buffer_pool_start;
+ bufferpool_class->stop = gst_v4l2_buffer_pool_stop;
+ bufferpool_class->set_config = gst_v4l2_buffer_pool_set_config;
+ bufferpool_class->alloc_buffer = gst_v4l2_buffer_pool_alloc_buffer;
+ bufferpool_class->acquire_buffer = gst_v4l2_buffer_pool_acquire_buffer;
+ bufferpool_class->release_buffer = gst_v4l2_buffer_pool_release_buffer;
+ bufferpool_class->free_buffer = gst_v4l2_buffer_pool_free_buffer;
}
/**
- * gst_v4l2_buffer_pool_dqbuf:
- * @pool: the pool
+ * gst_v4l2_buffer_pool_new:
+ * @obj: the v4l2 object owning the pool
*
- * Dequeue a buffer from the driver. Some generic error handling is done in
- * this function, but any error handling specific to v4l2src (capture) or
- * v4l2sink (output) can be done outside this function by checking 'errno'
+ * Construct a new buffer pool.
*
- * Returns: a buffer
+ * Returns: the new pool, use gst_object_unref() to free resources
*/
-GstV4l2Buffer *
-gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool)
+GstBufferPool *
+gst_v4l2_buffer_pool_new (GstV4l2Object * obj, GstCaps * caps)
{
- GstV4l2Object *v4l2object = get_v4l2_object (pool->v4l2elem);
- GstV4l2Buffer *pool_buffer;
- struct v4l2_buffer buffer;
+ GstV4l2BufferPool *pool;
+ gint fd;
- memset (&buffer, 0x00, sizeof (buffer));
- buffer.type = pool->type;
- buffer.memory = V4L2_MEMORY_MMAP;
+ fd = v4l2_dup (obj->video_fd);
+ if (fd < 0)
+ goto dup_failed;
+ pool = (GstV4l2BufferPool *) g_object_new (GST_TYPE_V4L2_BUFFER_POOL, NULL);
+ pool->video_fd = fd;
+ pool->obj = obj;
- if (v4l2_ioctl (pool->video_fd, VIDIOC_DQBUF, &buffer) >= 0) {
+ gst_buffer_pool_config_set (GST_BUFFER_POOL_CAST (pool)->config, caps,
+ obj->sizeimage, 2, 0, 0, 0);
- GST_V4L2_BUFFER_POOL_LOCK (pool);
+ return GST_BUFFER_POOL (pool);
- /* get our GstBuffer with that index from the pool, if the buffer was
- * outstanding we have a serious problem.
- */
- pool_buffer = pool->buffers[buffer.index];
+ /* ERRORS */
+dup_failed:
+ {
+ GST_DEBUG ("failed to dup fd %d (%s)", errno, g_strerror (errno));
+ return NULL;
+ }
+}
- if (pool_buffer == NULL) {
- GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, FAILED,
- (_("Failed trying to get video frames from device '%s'."),
- v4l2object->videodev),
- (_("No free buffers found in the pool at index %d."), buffer.index));
- GST_V4L2_BUFFER_POOL_UNLOCK (pool);
- return NULL;
- }
+static GstFlowReturn
+gst_v4l2_do_read (GstV4l2BufferPool * pool, GstBuffer * buf)
+{
+ GstFlowReturn res;
+ GstV4l2Object *obj = pool->obj;
+ gint amount;
+ GstMapInfo map;
+ gint toread;
- GST_LOG_OBJECT (pool->v4l2elem,
- "grabbed frame %d (ix=%d), flags %08x, pool-ct=%d, buffer=%p",
- buffer.sequence, buffer.index, buffer.flags, pool->num_live_buffers,
- pool_buffer);
+ toread = obj->sizeimage;
- pool->num_live_buffers++;
- GST_DEBUG_OBJECT (pool->v4l2elem, "num_live_buffers++: %d",
- pool->num_live_buffers);
+ GST_LOG_OBJECT (pool, "reading %d bytes into buffer %p", toread, buf);
- /* set top/bottom field first if v4l2_buffer has the information */
- if (buffer.field == V4L2_FIELD_INTERLACED_TB)
- GST_BUFFER_FLAG_SET (pool_buffer, GST_VIDEO_BUFFER_TFF);
- if (buffer.field == V4L2_FIELD_INTERLACED_BT)
- GST_BUFFER_FLAG_UNSET (pool_buffer, GST_VIDEO_BUFFER_TFF);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
- /* this can change at every frame, esp. with jpeg */
- GST_BUFFER_SIZE (pool_buffer) = buffer.bytesused;
+ do {
+ if ((res = gst_v4l2_object_poll (obj)) != GST_FLOW_OK)
+ goto poll_error;
- GST_V4L2_BUFFER_POOL_UNLOCK (pool);
+ amount = v4l2_read (obj->video_fd, map.data, toread);
- return pool_buffer;
- }
+ if (amount == toread) {
+ break;
+ } else if (amount == -1) {
+ if (errno == EAGAIN || errno == EINTR) {
+ continue;
+ } else
+ goto read_error;
+ } else {
+ /* short reads can happen if a signal interrupts the read */
+ continue;
+ }
+ } while (TRUE);
+ GST_LOG_OBJECT (pool, "read %d bytes", amount);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_resize (buf, 0, amount);
- GST_WARNING_OBJECT (pool->v4l2elem,
- "problem grabbing frame %d (ix=%d), pool-ct=%d, buf.flags=%d",
- buffer.sequence, buffer.index,
- GST_MINI_OBJECT_REFCOUNT (pool), buffer.flags);
+ return GST_FLOW_OK;
- switch (errno) {
- case EAGAIN:
- GST_WARNING_OBJECT (pool->v4l2elem,
- "Non-blocking I/O has been selected using O_NONBLOCK and"
- " no buffer was in the outgoing queue. device %s",
- v4l2object->videodev);
- break;
- case EINVAL:
- GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, FAILED,
- (_("Failed trying to get video frames from device '%s'."),
- v4l2object->videodev),
- (_("The buffer type is not supported, or the index is out of bounds,"
- " or no buffers have been allocated yet, or the userptr"
- " or length are invalid. device %s"), v4l2object->videodev));
- break;
- case ENOMEM:
- GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, FAILED,
- (_("Failed trying to get video frames from device '%s'. Not enough memory."), v4l2object->videodev), (_("insufficient memory to enqueue a user pointer buffer. device %s."), v4l2object->videodev));
- break;
- case EIO:
- GST_INFO_OBJECT (pool->v4l2elem,
- "VIDIOC_DQBUF failed due to an internal error."
- " Can also indicate temporary problems like signal loss."
- " Note the driver might dequeue an (empty) buffer despite"
- " returning an error, or even stop capturing."
- " device %s", v4l2object->videodev);
- /* have we de-queued a buffer ? */
- if (!(buffer.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))) {
- GST_DEBUG_OBJECT (pool->v4l2elem, "reenqueing buffer");
- /* FIXME ... should we do something here? */
- }
- break;
- case EINTR:
- GST_WARNING_OBJECT (pool->v4l2elem,
- "could not sync on a buffer on device %s", v4l2object->videodev);
- break;
- default:
- GST_WARNING_OBJECT (pool->v4l2elem,
- "Grabbing frame got interrupted on %s unexpectedly. %d: %s.",
- v4l2object->videodev, errno, g_strerror (errno));
- break;
+ /* ERRORS */
+poll_error:
+ {
+ GST_DEBUG ("poll error %s", gst_flow_get_name (res));
+ goto cleanup;
+ }
+read_error:
+ {
+ GST_ELEMENT_ERROR (obj->element, RESOURCE, READ,
+ (_("Error reading %d bytes from device '%s'."),
+ toread, obj->videodev), GST_ERROR_SYSTEM);
+ res = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+cleanup:
+ {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_resize (buf, 0, 0);
+ return res;
}
-
- return NULL;
}
/**
- * gst_v4l2_buffer_pool_available_buffers:
- * @pool: the pool
+ * gst_v4l2_buffer_pool_process:
+ * @bpool: a #GstBufferPool
+ * @buf: a #GstBuffer
*
- * Check the number of buffers available to the driver, ie. buffers that
- * have been QBUF'd but not yet DQBUF'd.
+ * Process @buf in @bpool. For capture devices, this functions fills @buf with
+ * data from the device. For output devices, this functions send the contents of
+ * @buf to the device for playback.
*
- * Returns: the number of buffers available.
+ * Returns: %GST_FLOW_OK on success.
*/
-gint
-gst_v4l2_buffer_pool_available_buffers (GstV4l2BufferPool * pool)
+GstFlowReturn
+gst_v4l2_buffer_pool_process (GstV4l2BufferPool * pool, GstBuffer * buf)
{
- return pool->buffer_count - pool->num_live_buffers;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBufferPool *bpool = GST_BUFFER_POOL_CAST (pool);
+ GstV4l2Object *obj = pool->obj;
+
+ GST_DEBUG_OBJECT (pool, "process buffer %p", buf);
+
+ switch (obj->type) {
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE:
+ /* capture */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* capture into the buffer */
+ ret = gst_v4l2_do_read (pool, buf);
+ break;
+
+ case GST_V4L2_IO_MMAP:
+ {
+ GstBuffer *tmp;
+
+ if (buf->pool == bpool)
+ /* nothing, data was inside the buffer when we did _acquire() */
+ goto done;
+
+ /* buffer not from our pool, grab a frame and copy it into the target */
+ if ((ret = gst_v4l2_buffer_pool_dqbuf (pool, &tmp)) != GST_FLOW_OK)
+ goto done;
+
+ if (!gst_v4l2_object_copy (obj, buf, tmp))
+ goto copy_failed;
+
+ /* an queue the buffer again after the copy */
+ if ((ret = gst_v4l2_buffer_pool_qbuf (pool, tmp)) != GST_FLOW_OK)
+ goto done;
+ break;
+ }
+
+ case GST_V4L2_IO_USERPTR:
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ break;
+
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT:
+ /* playback */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* FIXME, do write() */
+ GST_WARNING_OBJECT (pool, "implement write()");
+ break;
+
+ case GST_V4L2_IO_MMAP:
+ {
+ GstBuffer *to_queue;
+
+ if (buf->pool == bpool) {
+ /* nothing, we can queue directly */
+ to_queue = buf;
+ GST_LOG_OBJECT (pool, "processing buffer from our pool");
+ } else {
+ GST_LOG_OBJECT (pool, "alloc buffer from our pool");
+ if (!gst_buffer_pool_is_active (bpool)) {
+ GstStructure *config;
+
+ /* this pool was not activated, configure and activate */
+ GST_DEBUG_OBJECT (pool, "activating pool");
+
+ config = gst_buffer_pool_get_config (bpool);
+ gst_buffer_pool_config_add_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_META);
+ gst_buffer_pool_set_config (bpool, config);
+
+ if (!gst_buffer_pool_set_active (bpool, TRUE))
+ goto activate_failed;
+ }
+
+ /* this can block if all buffers are outstanding which would be
+ * strange because we would expect the upstream element to have
+ * allocated them and returned to us.. */
+ ret = GST_BUFFER_POOL_CLASS (parent_class)->acquire_buffer (bpool,
+ &to_queue, NULL);
+ if (ret != GST_FLOW_OK)
+ goto acquire_failed;
+
+ /* copy into it and queue */
+ if (!gst_v4l2_object_copy (obj, to_queue, buf))
+ goto copy_failed;
+ }
+
+ if ((ret = gst_v4l2_buffer_pool_qbuf (pool, to_queue)) != GST_FLOW_OK)
+ goto done;
+
+ /* if we are not streaming yet (this is the first buffer, start
+ * streaming now */
+ if (!pool->streaming)
+ if (!start_streaming (pool))
+ goto start_failed;
+
+ if (pool->num_queued == pool->num_allocated) {
+ /* all buffers are queued, try to dequeue one and release it back
+ * into the pool so that _acquire can get to it again. */
+ ret = gst_v4l2_buffer_pool_dqbuf (pool, &to_queue);
+ if (ret != GST_FLOW_OK)
+ goto done;
+
+ /* release the rendered buffer back into the pool. This wakes up any
+ * thread waiting for a buffer in _acquire() */
+ gst_v4l2_buffer_pool_release_buffer (bpool, to_queue);
+ }
+ break;
+ }
+
+ case GST_V4L2_IO_USERPTR:
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+done:
+ return ret;
+
+ /* ERRORS */
+activate_failed:
+ {
+ GST_ERROR_OBJECT (obj->element, "failed to activate pool");
+ return GST_FLOW_ERROR;
+ }
+acquire_failed:
+ {
+ GST_WARNING_OBJECT (obj->element, "failed to acquire a buffer: %s",
+ gst_flow_get_name (ret));
+ return ret;
+ }
+copy_failed:
+ {
+ GST_ERROR_OBJECT (obj->element, "failed to copy data");
+ return GST_FLOW_ERROR;
+ }
+start_failed:
+ {
+ GST_ERROR_OBJECT (obj->element, "failed to start streaming");
+ return GST_FLOW_ERROR;
+ }
}
* Boston, MA 02111-1307, USA.
*/
-#ifndef __GSTV4L2BUFFER_H__
-#define __GSTV4L2BUFFER_H__
+#ifndef __GST_V4L2_BUFFER_POOL_H__
+#define __GST_V4L2_BUFFER_POOL_H__
#include <gst/gst.h>
-#include "v4l2_calls.h"
+
+typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
+typedef struct _GstV4l2BufferPoolClass GstV4l2BufferPoolClass;
+typedef struct _GstV4l2Meta GstV4l2Meta;
+
+#include "gstv4l2object.h"
+//#include "v4l2_calls.h"
GST_DEBUG_CATEGORY_EXTERN (v4l2buffer_debug);
G_BEGIN_DECLS
-GType gst_v4l2_buffer_get_type (void);
-#define GST_TYPE_V4L2_BUFFER (gst_v4l2_buffer_get_type())
-#define GST_IS_V4L2_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_BUFFER))
-#define GST_V4L2_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_BUFFER, GstV4l2Buffer))
-
-GType gst_v4l2_buffer_pool_get_type (void);
-#define GST_TYPE_V4L2_BUFFER_POOL (gst_v4l2_buffer_pool_get_type())
-#define GST_IS_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_BUFFER_POOL))
-#define GST_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_BUFFER_POOL, GstV4l2BufferPool))
+#define GST_TYPE_V4L2_BUFFER_POOL (gst_v4l2_buffer_pool_get_type())
+#define GST_IS_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_BUFFER_POOL))
+#define GST_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_BUFFER_POOL, GstV4l2BufferPool))
+#define GST_V4L2_BUFFER_POOL_CAST(obj) ((GstV4l2BufferPool*)(obj))
+struct _GstV4l2BufferPool
+{
+ GstBufferPool parent;
+ GstV4l2Object *obj; /* the v4l2 object */
+ gint video_fd; /* a dup(2) of the v4l2object's video_fd */
-typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
-typedef struct _GstV4l2Buffer GstV4l2Buffer;
+ GstAllocator *allocator;
+ guint size;
+ guint min_buffers;
+ guint max_buffers;
+ guint prefix;
+ guint align;
+ gboolean add_videometa;
+ guint num_allocated; /* number of buffers allocated by the driver */
+ guint num_queued; /* number of buffers queued in the driver */
-struct _GstV4l2BufferPool
-{
- GstMiniObject parent;
+ gboolean streaming;
- GstElement *v4l2elem; /* the v4l2 src/sink that owns us.. maybe we should be owned by v4l2object? */
- gboolean requeuebuf; /* if true, unusued buffers are automatically re-QBUF'd */
- enum v4l2_buf_type type; /* V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_OUTPUT */
+ GstBuffer **buffers;
+};
- GMutex *lock;
- gboolean running; /* with lock */
- gint num_live_buffers; /* number of buffers not with driver */
- GAsyncQueue* avail_buffers;/* pool of available buffers, not with the driver and which aren't held outside the bufferpool */
- gint video_fd; /* a dup(2) of the v4l2object's video_fd */
- guint buffer_count;
- GstV4l2Buffer **buffers;
+struct _GstV4l2BufferPoolClass
+{
+ GstBufferPoolClass parent_class;
};
-struct _GstV4l2Buffer {
- GstBuffer buffer;
+struct _GstV4l2Meta {
+ GstMeta meta;
+ gpointer mem;
struct v4l2_buffer vbuffer;
-
- /* FIXME: have GstV4l2Src* instead, as this has GstV4l2BufferPool* */
- /* FIXME: do we really want to fix this if GstV4l2Buffer/Pool is shared
- * between v4l2src and v4l2sink??
- */
- GstV4l2BufferPool *pool;
};
-void gst_v4l2_buffer_pool_destroy (GstV4l2BufferPool * pool);
-GstV4l2BufferPool *gst_v4l2_buffer_pool_new (GstElement *v4l2elem, gint fd, gint num_buffers, GstCaps * caps, gboolean requeuebuf, enum v4l2_buf_type type);
-
+const GstMetaInfo * gst_v4l2_meta_get_info (void);
+#define GST_V4L2_META_GET(buf) ((GstV4l2Meta *)gst_buffer_get_meta(buf,gst_v4l2_meta_get_info()))
+#define GST_V4L2_META_ADD(buf) ((GstV4l2Meta *)gst_buffer_add_meta(buf,gst_v4l2_meta_get_info(),NULL))
-GstV4l2Buffer *gst_v4l2_buffer_pool_get (GstV4l2BufferPool *pool, gboolean blocking);
-gboolean gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool *pool, GstV4l2Buffer *buf);
-GstV4l2Buffer *gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool *pool);
-
-gint gst_v4l2_buffer_pool_available_buffers (GstV4l2BufferPool *pool);
+GType gst_v4l2_buffer_pool_get_type (void);
+GstBufferPool * gst_v4l2_buffer_pool_new (GstV4l2Object *obj, GstCaps *caps);
-#define GST_V4L2_BUFFER_POOL_LOCK(pool) g_mutex_lock ((pool)->lock)
-#define GST_V4L2_BUFFER_POOL_UNLOCK(pool) g_mutex_unlock ((pool)->lock)
+GstFlowReturn gst_v4l2_buffer_pool_process (GstV4l2BufferPool * bpool, GstBuffer * buf);
G_END_DECLS
-#endif /* __GSTV4L2BUFFER_H__ */
+#endif /*__GST_V4L2_BUFFER_POOL_H__ */
#include "gstv4l2colorbalance.h"
#include "gstv4l2object.h"
-GST_BOILERPLATE (GstV4l2ColorBalanceChannel,
- gst_v4l2_color_balance_channel,
- GstColorBalanceChannel, GST_TYPE_COLOR_BALANCE_CHANNEL);
-
-static void
-gst_v4l2_color_balance_channel_base_init (gpointer g_class)
-{
-}
+#define gst_v4l2_color_balance_channel_parent_class parent_class
+G_DEFINE_TYPE (GstV4l2ColorBalanceChannel,
+ gst_v4l2_color_balance_channel, GST_TYPE_COLOR_BALANCE_CHANNEL);
static void
gst_v4l2_color_balance_channel_class_init (GstV4l2ColorBalanceChannelClass *
}
static void
-gst_v4l2_color_balance_channel_init (GstV4l2ColorBalanceChannel * channel,
- GstV4l2ColorBalanceChannelClass * klass)
+gst_v4l2_color_balance_channel_init (GstV4l2ColorBalanceChannel * channel)
{
channel->id = (guint32) - 1;
}
#define __GST_V4L2_COLOR_BALANCE_H__
#include <gst/gst.h>
-#include <gst/interfaces/colorbalance.h>
+#include <gst/video/colorbalance.h>
#include "v4l2_calls.h"
G_BEGIN_DECLS
} \
\
static void \
-interface_as_function ## _color_balance_interface_init (GstColorBalanceClass * klass) \
+interface_as_function ## _color_balance_interface_init (GstColorBalanceInterface * iface) \
{ \
- GST_COLOR_BALANCE_TYPE (klass) = GST_COLOR_BALANCE_HARDWARE; \
+ GST_COLOR_BALANCE_TYPE (iface) = GST_COLOR_BALANCE_HARDWARE; \
\
/* default virtual functions */ \
- klass->list_channels = interface_as_function ## _color_balance_list_channels; \
- klass->set_value = interface_as_function ## _color_balance_set_value; \
- klass->get_value = interface_as_function ## _color_balance_get_value; \
+ iface->list_channels = interface_as_function ## _color_balance_list_channels; \
+ iface->set_value = interface_as_function ## _color_balance_set_value; \
+ iface->get_value = interface_as_function ## _color_balance_get_value; \
} \
#endif /* __GST_V4L2_COLOR_BALANCE_H__ */
#include "v4l2_calls.h"
#include "gstv4l2tuner.h"
#ifdef HAVE_XVIDEO
-#include "gstv4l2xoverlay.h"
+#include "gstv4l2videooverlay.h"
#endif
#include "gstv4l2colorbalance.h"
#include "gst/gst-i18n-plugin.h"
+#include <gst/video/video.h>
+
/* videodev2.h is not versioned and we can't easily check for the presence
* of enum values at compile time, but the V4L2_CAP_VIDEO_OUTPUT_OVERLAY define
* was added in the same commit as V4L2_FIELD_INTERLACED_{TB,BT} (b2787845) */
#endif
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
+GST_DEBUG_CATEGORY_EXTERN (GST_CAT_PERFORMANCE);
#define GST_CAT_DEFAULT v4l2_debug
-
#define DEFAULT_PROP_DEVICE_NAME NULL
#define DEFAULT_PROP_DEVICE_FD -1
#define DEFAULT_PROP_FLAGS 0
#define DEFAULT_PROP_TV_NORM 0
#define DEFAULT_PROP_CHANNEL NULL
#define DEFAULT_PROP_FREQUENCY 0
+#define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
enum
{
V4L2_STD_OBJECT_PROPS,
};
+#if 0
G_LOCK_DEFINE_STATIC (probe_lock);
const GList *
GObjectClass *klass = G_OBJECT_GET_CLASS (probe);
static GList *list = NULL;
- /* well, not perfect, but better than no locking at all.
- * In the worst case we leak a list node, so who cares? */
G_LOCK (probe_lock);
if (!list) {
return array;
}
+#endif
#define GST_TYPE_V4L2_DEVICE_FLAGS (gst_v4l2_device_get_type ())
static GType
return v4l2_tv_norm;
}
+#define GST_TYPE_V4L2_IO_MODE (gst_v4l2_io_mode_get_type ())
+static GType
+gst_v4l2_io_mode_get_type (void)
+{
+ static GType v4l2_io_mode = 0;
+
+ if (!v4l2_io_mode) {
+ static const GEnumValue io_modes[] = {
+ {GST_V4L2_IO_AUTO, "GST_V4L2_IO_AUTO", "auto"},
+ {GST_V4L2_IO_RW, "GST_V4L2_IO_RW", "rw"},
+ {GST_V4L2_IO_MMAP, "GST_V4L2_IO_MMAP", "mmap"},
+ {GST_V4L2_IO_USERPTR, "GST_V4L2_IO_USERPTR", "userptr"},
+
+ {0, NULL, NULL}
+ };
+ v4l2_io_mode = g_enum_register_static ("GstV4l2IOMode", io_modes);
+ }
+ return v4l2_io_mode;
+}
+
void
gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class,
const char *default_device)
"video standard",
GST_TYPE_V4L2_TV_NORM, DEFAULT_PROP_TV_NORM,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstV4l2Src:io-mode
+ *
+ * IO Mode
+ */
+ g_object_class_install_property (gobject_class, PROP_IO_MODE,
+ g_param_spec_enum ("io-mode", "IO mode",
+ "I/O mode",
+ GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
GstV4l2Object *
v4l2object->video_fd = -1;
v4l2object->poll = gst_poll_new (TRUE);
- v4l2object->buffer = NULL;
+ v4l2object->active = FALSE;
v4l2object->videodev = g_strdup (default_device);
v4l2object->norms = NULL;
}
break;
#endif
+ case PROP_IO_MODE:
+ v4l2object->req_mode = g_value_get_enum (value);
+ break;
default:
return FALSE;
break;
case PROP_TV_NORM:
g_value_set_enum (value, v4l2object->tv_norm);
break;
+ case PROP_IO_MODE:
+ g_value_set_enum (value, v4l2object->req_mode);
+ break;
default:
return FALSE;
break;
}
gboolean
-gst_v4l2_object_start (GstV4l2Object * v4l2object)
+gst_v4l2_object_open (GstV4l2Object * v4l2object)
{
if (gst_v4l2_open (v4l2object))
gst_v4l2_set_defaults (v4l2object);
return FALSE;
#ifdef HAVE_XVIDEO
- gst_v4l2_xoverlay_start (v4l2object);
+ gst_v4l2_video_overlay_start (v4l2object);
#endif
return TRUE;
}
gboolean
-gst_v4l2_object_stop (GstV4l2Object * v4l2object)
+gst_v4l2_object_close (GstV4l2Object * v4l2object)
{
#ifdef HAVE_XVIDEO
- gst_v4l2_xoverlay_stop (v4l2object);
+ gst_v4l2_video_overlay_stop (v4l2object);
#endif
if (!gst_v4l2_close (v4l2object))
case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
#endif
case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
- structure = gst_structure_new ("image/jpeg", NULL);
+ structure = gst_structure_new_empty ("image/jpeg");
+ break;
+ case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
+ case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
+ /* FIXME: get correct fourccs here */
break;
case V4L2_PIX_FMT_RGB332:
- case V4L2_PIX_FMT_RGB555:
case V4L2_PIX_FMT_RGB555X:
- case V4L2_PIX_FMT_RGB565:
case V4L2_PIX_FMT_RGB565X:
+ /* FIXME: get correct fourccs here */
+ break;
+ case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ case V4L2_PIX_FMT_RGB555:
+ case V4L2_PIX_FMT_RGB565:
case V4L2_PIX_FMT_RGB24:
case V4L2_PIX_FMT_BGR24:
case V4L2_PIX_FMT_RGB32:
- case V4L2_PIX_FMT_BGR32:{
- guint depth = 0, bpp = 0;
-
- gint endianness = 0;
-
- guint32 r_mask = 0, b_mask = 0, g_mask = 0;
-
- switch (fourcc) {
- case V4L2_PIX_FMT_RGB332:
- bpp = depth = 8;
- endianness = G_BYTE_ORDER; /* 'like, whatever' */
- r_mask = 0xe0;
- g_mask = 0x1c;
- b_mask = 0x03;
- break;
- case V4L2_PIX_FMT_RGB555:
- case V4L2_PIX_FMT_RGB555X:
- bpp = 16;
- depth = 15;
- endianness =
- fourcc == V4L2_PIX_FMT_RGB555X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
- r_mask = 0x7c00;
- g_mask = 0x03e0;
- b_mask = 0x001f;
- break;
- case V4L2_PIX_FMT_RGB565:
- case V4L2_PIX_FMT_RGB565X:
- bpp = depth = 16;
- endianness =
- fourcc == V4L2_PIX_FMT_RGB565X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
- r_mask = 0xf800;
- g_mask = 0x07e0;
- b_mask = 0x001f;
- break;
- case V4L2_PIX_FMT_RGB24:
- bpp = depth = 24;
- endianness = G_BIG_ENDIAN;
- r_mask = 0xff0000;
- g_mask = 0x00ff00;
- b_mask = 0x0000ff;
- break;
- case V4L2_PIX_FMT_BGR24:
- bpp = depth = 24;
- endianness = G_BIG_ENDIAN;
- r_mask = 0x0000ff;
- g_mask = 0x00ff00;
- b_mask = 0xff0000;
- break;
- case V4L2_PIX_FMT_RGB32:
- bpp = depth = 32;
- endianness = G_BIG_ENDIAN;
- r_mask = 0xff000000;
- g_mask = 0x00ff0000;
- b_mask = 0x0000ff00;
- break;
- case V4L2_PIX_FMT_BGR32:
- bpp = depth = 32;
- endianness = G_BIG_ENDIAN;
- r_mask = 0x000000ff;
- g_mask = 0x0000ff00;
- b_mask = 0x00ff0000;
- break;
- default:
- g_assert_not_reached ();
- break;
- }
- structure = gst_structure_new ("video/x-raw-rgb",
- "bpp", G_TYPE_INT, bpp,
- "depth", G_TYPE_INT, depth,
- "red_mask", G_TYPE_INT, r_mask,
- "green_mask", G_TYPE_INT, g_mask,
- "blue_mask", G_TYPE_INT, b_mask,
- "endianness", G_TYPE_INT, endianness, NULL);
- break;
- }
- case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
- structure = gst_structure_new ("video/x-raw-gray",
- "bpp", G_TYPE_INT, 8, NULL);
- break;
- case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
- case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
- /* FIXME: get correct fourccs here */
- break;
+ case V4L2_PIX_FMT_BGR32:
case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
case V4L2_PIX_FMT_YVU410:
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVU420:
case V4L2_PIX_FMT_UYVY:
+#if 0
case V4L2_PIX_FMT_Y41P:
+#endif
case V4L2_PIX_FMT_YUV422P:
#ifdef V4L2_PIX_FMT_YVYU
case V4L2_PIX_FMT_YVYU:
#endif
case V4L2_PIX_FMT_YUV411P:{
- guint32 fcc = 0;
+ GstVideoFormat format;
switch (fourcc) {
+ case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ format = GST_VIDEO_FORMAT_GRAY8;
+ break;
+ case V4L2_PIX_FMT_RGB555:
+ format = GST_VIDEO_FORMAT_RGB15;
+ break;
+ case V4L2_PIX_FMT_RGB565:
+ format = GST_VIDEO_FORMAT_RGB16;
+ break;
+ case V4L2_PIX_FMT_RGB24:
+ format = GST_VIDEO_FORMAT_RGB;
+ break;
+ case V4L2_PIX_FMT_BGR24:
+ format = GST_VIDEO_FORMAT_BGR;
+ break;
+ case V4L2_PIX_FMT_RGB32:
+ format = GST_VIDEO_FORMAT_RGBx;
+ break;
+ case V4L2_PIX_FMT_BGR32:
+ format = GST_VIDEO_FORMAT_BGRx;
+ break;
case V4L2_PIX_FMT_NV12:
- fcc = GST_MAKE_FOURCC ('N', 'V', '1', '2');
+ format = GST_VIDEO_FORMAT_NV12;
break;
case V4L2_PIX_FMT_NV21:
- fcc = GST_MAKE_FOURCC ('N', 'V', '2', '1');
+ format = GST_VIDEO_FORMAT_NV21;
break;
case V4L2_PIX_FMT_YVU410:
- fcc = GST_MAKE_FOURCC ('Y', 'V', 'U', '9');
+ format = GST_VIDEO_FORMAT_YVU9;
break;
case V4L2_PIX_FMT_YUV410:
- fcc = GST_MAKE_FOURCC ('Y', 'U', 'V', '9');
+ format = GST_VIDEO_FORMAT_YUV9;
break;
case V4L2_PIX_FMT_YUV420:
- fcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
+ format = GST_VIDEO_FORMAT_I420;
break;
case V4L2_PIX_FMT_YUYV:
- fcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
+ format = GST_VIDEO_FORMAT_YUY2;
break;
case V4L2_PIX_FMT_YVU420:
- fcc = GST_MAKE_FOURCC ('Y', 'V', '1', '2');
+ format = GST_VIDEO_FORMAT_YV12;
break;
case V4L2_PIX_FMT_UYVY:
- fcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
+ format = GST_VIDEO_FORMAT_UYVY;
break;
+#if 0
case V4L2_PIX_FMT_Y41P:
- fcc = GST_MAKE_FOURCC ('Y', '4', '1', 'P');
+ format = GST_VIDEO_FORMAT_Y41P;
break;
+#endif
case V4L2_PIX_FMT_YUV411P:
- fcc = GST_MAKE_FOURCC ('Y', '4', '1', 'B');
+ format = GST_VIDEO_FORMAT_Y41B;
break;
case V4L2_PIX_FMT_YUV422P:
- fcc = GST_MAKE_FOURCC ('Y', '4', '2', 'B');
+ format = GST_VIDEO_FORMAT_Y42B;
break;
#ifdef V4L2_PIX_FMT_YVYU
case V4L2_PIX_FMT_YVYU:
- fcc = GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U');
+ format = GST_VIDEO_FORMAT_YVYU;
break;
#endif
default:
g_assert_not_reached ();
break;
}
- structure = gst_structure_new ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, fcc, NULL);
+ structure = gst_structure_new ("video/x-raw",
+ "format", G_TYPE_STRING, gst_video_format_to_string (format), NULL);
break;
}
case V4L2_PIX_FMT_DV:
NULL);
break;
case V4L2_PIX_FMT_MPEG: /* MPEG */
- structure = gst_structure_new ("video/mpegts", NULL);
+ structure = gst_structure_new_empty ("video/mpegts");
break;
case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
break;
#ifdef V4L2_PIX_FMT_SBGGR8
case V4L2_PIX_FMT_SBGGR8:
- structure = gst_structure_new ("video/x-raw-bayer", NULL);
+ structure = gst_structure_new_empty ("video/x-raw-bayer");
break;
#endif
#ifdef V4L2_PIX_FMT_SN9C10X
case V4L2_PIX_FMT_SN9C10X:
- structure = gst_structure_new ("video/x-sonix", NULL);
+ structure = gst_structure_new_empty ("video/x-sonix");
break;
#endif
#ifdef V4L2_PIX_FMT_PWC1
case V4L2_PIX_FMT_PWC1:
- structure = gst_structure_new ("video/x-pwc1", NULL);
+ structure = gst_structure_new_empty ("video/x-pwc1");
break;
#endif
#ifdef V4L2_PIX_FMT_PWC2
case V4L2_PIX_FMT_PWC2:
- structure = gst_structure_new ("video/x-pwc2", NULL);
+ structure = gst_structure_new_empty ("video/x-pwc2");
break;
#endif
default:
* @fps_n/@fps_d: location for framerate
* @size: location for expected size of the frame or 0 if unknown
*/
-gboolean
+static gboolean
gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps,
- struct v4l2_fmtdesc ** format, gint * w, gint * h,
- gboolean * interlaced, guint * fps_n, guint * fps_d, guint * size)
+ struct v4l2_fmtdesc **format, GstVideoInfo * info)
{
GstStructure *structure;
- const GValue *framerate;
guint32 fourcc;
const gchar *mimetype;
- guint outsize;
+ struct v4l2_fmtdesc *fmt;
/* default unknown values */
fourcc = 0;
- outsize = 0;
structure = gst_caps_get_structure (caps, 0);
mimetype = gst_structure_get_name (structure);
- if (strcmp (mimetype, "video/mpegts") == 0) {
- fourcc = V4L2_PIX_FMT_MPEG;
- *fps_n = 0;
- *fps_d = 1;
- goto done;
- }
-
- if (!gst_structure_get_int (structure, "width", w))
- return FALSE;
-
- if (!gst_structure_get_int (structure, "height", h))
- return FALSE;
-
- if (!gst_structure_get_boolean (structure, "interlaced", interlaced))
- *interlaced = FALSE;
+ if (g_str_equal (mimetype, "video/x-raw")) {
+ /* raw caps, parse into video info */
+ if (!gst_video_info_from_caps (info, caps))
+ goto invalid_format;
- framerate = gst_structure_get_value (structure, "framerate");
- if (!framerate)
- return FALSE;
-
- *fps_n = gst_value_get_fraction_numerator (framerate);
- *fps_d = gst_value_get_fraction_denominator (framerate);
-
- if (!strcmp (mimetype, "video/x-raw-yuv")) {
- gst_structure_get_fourcc (structure, "format", &fourcc);
-
- switch (fourcc) {
- case GST_MAKE_FOURCC ('I', '4', '2', '0'):
- case GST_MAKE_FOURCC ('I', 'Y', 'U', 'V'):
+ switch (GST_VIDEO_INFO_FORMAT (info)) {
+ case GST_VIDEO_FORMAT_I420:
fourcc = V4L2_PIX_FMT_YUV420;
- outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
- outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * (GST_ROUND_UP_2 (*h) / 2));
break;
- case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
+ case GST_VIDEO_FORMAT_YUY2:
fourcc = V4L2_PIX_FMT_YUYV;
- outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
break;
- case GST_MAKE_FOURCC ('Y', '4', '1', 'P'):
+#if 0
+ case GST_VIDEO_FORMAT_Y41P:
fourcc = V4L2_PIX_FMT_Y41P;
- outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
break;
- case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
+#endif
+ case GST_VIDEO_FORMAT_UYVY:
fourcc = V4L2_PIX_FMT_UYVY;
- outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
break;
- case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
+ case GST_VIDEO_FORMAT_YV12:
fourcc = V4L2_PIX_FMT_YVU420;
- outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
- outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * (GST_ROUND_UP_2 (*h) / 2));
break;
- case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
+ case GST_VIDEO_FORMAT_Y41B:
fourcc = V4L2_PIX_FMT_YUV411P;
- outsize = GST_ROUND_UP_4 (*w) * *h;
- outsize += 2 * ((GST_ROUND_UP_8 (*w) / 4) * *h);
break;
- case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
+ case GST_VIDEO_FORMAT_Y42B:
fourcc = V4L2_PIX_FMT_YUV422P;
- outsize = GST_ROUND_UP_4 (*w) * *h;
- outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * *h);
break;
- case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
+ case GST_VIDEO_FORMAT_NV12:
fourcc = V4L2_PIX_FMT_NV12;
- outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
- outsize += (GST_ROUND_UP_4 (*w) * *h) / 2;
break;
- case GST_MAKE_FOURCC ('N', 'V', '2', '1'):
+ case GST_VIDEO_FORMAT_NV21:
fourcc = V4L2_PIX_FMT_NV21;
- outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
- outsize += (GST_ROUND_UP_4 (*w) * *h) / 2;
break;
#ifdef V4L2_PIX_FMT_YVYU
- case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
+ case GST_VIDEO_FORMAT_YVYU:
fourcc = V4L2_PIX_FMT_YVYU;
- outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
break;
#endif
- }
- } else if (!strcmp (mimetype, "video/x-raw-rgb")) {
- gint depth, endianness, r_mask;
-
- gst_structure_get_int (structure, "depth", &depth);
- gst_structure_get_int (structure, "endianness", &endianness);
- gst_structure_get_int (structure, "red_mask", &r_mask);
-
- switch (depth) {
- case 8:
- fourcc = V4L2_PIX_FMT_RGB332;
+ case GST_VIDEO_FORMAT_RGB15:
+ fourcc = V4L2_PIX_FMT_RGB555;
+ break;
+ case GST_VIDEO_FORMAT_RGB16:
+ fourcc = V4L2_PIX_FMT_RGB565;
break;
- case 15:
- fourcc = (endianness == G_LITTLE_ENDIAN) ?
- V4L2_PIX_FMT_RGB555 : V4L2_PIX_FMT_RGB555X;
+ case GST_VIDEO_FORMAT_RGB:
+ fourcc = V4L2_PIX_FMT_RGB24;
break;
- case 16:
- fourcc = (endianness == G_LITTLE_ENDIAN) ?
- V4L2_PIX_FMT_RGB565 : V4L2_PIX_FMT_RGB565X;
+ case GST_VIDEO_FORMAT_BGR:
+ fourcc = V4L2_PIX_FMT_BGR24;
break;
- case 24:
- fourcc = (r_mask == 0xFF) ? V4L2_PIX_FMT_BGR24 : V4L2_PIX_FMT_RGB24;
+ case GST_VIDEO_FORMAT_RGBx:
+ case GST_VIDEO_FORMAT_RGBA:
+ fourcc = V4L2_PIX_FMT_RGB32;
break;
- case 32:
- fourcc = (r_mask == 0xFF) ? V4L2_PIX_FMT_BGR32 : V4L2_PIX_FMT_RGB32;
+ case GST_VIDEO_FORMAT_BGRx:
+ case GST_VIDEO_FORMAT_BGRA:
+ fourcc = V4L2_PIX_FMT_BGR32;
+ break;
+ case GST_VIDEO_FORMAT_GRAY8:
+ fourcc = V4L2_PIX_FMT_GREY;
+ default:
break;
}
- } else if (strcmp (mimetype, "video/x-dv") == 0) {
- fourcc = V4L2_PIX_FMT_DV;
- } else if (strcmp (mimetype, "image/jpeg") == 0) {
- fourcc = V4L2_PIX_FMT_JPEG;
+ } else {
+ gboolean dimensions = TRUE;
+
+ /* no video caps, construct videoinfo ourselves */
+ gst_video_info_init (info);
+
+ if (g_str_equal (mimetype, "video/mpegts")) {
+ fourcc = V4L2_PIX_FMT_MPEG;
+ dimensions = FALSE;
+ } else if (g_str_equal (mimetype, "video/x-dv")) {
+ fourcc = V4L2_PIX_FMT_DV;
+ } else if (g_str_equal (mimetype, "image/jpeg")) {
+ fourcc = V4L2_PIX_FMT_JPEG;
#ifdef V4L2_PIX_FMT_SBGGR8
- } else if (strcmp (mimetype, "video/x-raw-bayer") == 0) {
- fourcc = V4L2_PIX_FMT_SBGGR8;
+ } else if (g_str_equal (mimetype, "video/x-raw-bayer")) {
+ fourcc = V4L2_PIX_FMT_SBGGR8;
#endif
#ifdef V4L2_PIX_FMT_SN9C10X
- } else if (strcmp (mimetype, "video/x-sonix") == 0) {
- fourcc = V4L2_PIX_FMT_SN9C10X;
+ } else if (g_str_equal (mimetype, "video/x-sonix")) {
+ fourcc = V4L2_PIX_FMT_SN9C10X;
#endif
#ifdef V4L2_PIX_FMT_PWC1
- } else if (strcmp (mimetype, "video/x-pwc1") == 0) {
- fourcc = V4L2_PIX_FMT_PWC1;
+ } else if (g_str_equal (mimetype, "video/x-pwc1")) {
+ fourcc = V4L2_PIX_FMT_PWC1;
#endif
#ifdef V4L2_PIX_FMT_PWC2
- } else if (strcmp (mimetype, "video/x-pwc2") == 0) {
- fourcc = V4L2_PIX_FMT_PWC2;
+ } else if (g_str_equal (mimetype, "video/x-pwc2")) {
+ fourcc = V4L2_PIX_FMT_PWC2;
+ }
#endif
- } else if (strcmp (mimetype, "video/x-raw-gray") == 0) {
- fourcc = V4L2_PIX_FMT_GREY;
+
+ if (dimensions) {
+ const gchar *interlace_mode;
+
+ if (!gst_structure_get_int (structure, "width", &info->width))
+ goto no_width;
+
+ if (!gst_structure_get_int (structure, "height", &info->height))
+ goto no_height;
+
+ interlace_mode = gst_structure_get_string (structure, "interlace-mode");
+ if (g_str_equal (interlace_mode, "progressive")) {
+ info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
+ } else {
+ info->interlace_mode = GST_VIDEO_INTERLACE_MODE_MIXED;
+ }
+ if (!gst_structure_get_fraction (structure, "framerate", &info->fps_n,
+ &info->fps_d))
+ goto no_framerate;
+ }
}
if (fourcc == 0)
- return FALSE;
+ goto unhandled_format;
-done:
- *format = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc);
- *size = outsize;
+ fmt = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc);
+ if (fmt == NULL)
+ goto unsupported_format;
+
+ *format = fmt;
return TRUE;
+
+ /* ERRORS */
+no_width:
+ {
+ GST_DEBUG_OBJECT (v4l2object, "no width");
+ return FALSE;
+ }
+no_height:
+ {
+ GST_DEBUG_OBJECT (v4l2object, "no height");
+ return FALSE;
+ }
+no_framerate:
+ {
+ GST_DEBUG_OBJECT (v4l2object, "no framerate");
+ return FALSE;
+ }
+invalid_format:
+ {
+ GST_DEBUG_OBJECT (v4l2object, "invalid format");
+ return FALSE;
+ }
+unhandled_format:
+ {
+ GST_DEBUG_OBJECT (v4l2object, "unhandled format");
+ return FALSE;
+ }
+unsupported_format:
+ {
+ GST_DEBUG_OBJECT (v4l2object, "unsupported format");
+ return FALSE;
+ }
}
s = gst_structure_copy (template);
gst_structure_set (s, "width", G_TYPE_INT, (gint) width,
"height", G_TYPE_INT, (gint) height,
- "interlaced", G_TYPE_BOOLEAN, interlaced,
+ "interlace-mode", G_TYPE_STRING, (interlaced ? "mixed" : "progressive"),
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1, NULL);
if (G_IS_VALUE (&rates)) {
guint32 w, h;
if (pixelformat == GST_MAKE_FOURCC ('M', 'P', 'E', 'G'))
- return gst_caps_new_simple ("video/mpegts", NULL);
+ return gst_caps_new_empty_simple ("video/mpegts");
memset (&size, 0, sizeof (struct v4l2_frmsizeenum));
size.index = 0;
else
gst_structure_set (tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
- gst_structure_set (tmp, "interlaced", G_TYPE_BOOLEAN, interlaced, NULL);
- gst_structure_set (tmp, "pixel-aspect-ratio",
- GST_TYPE_FRACTION, 1, 1, NULL);
+ gst_structure_set (tmp, "interlace-mode", G_TYPE_STRING,
+ (interlaced ? "mixed" : "progressive"), NULL);
+ gst_structure_set (tmp, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
+ NULL);
gst_caps_append_structure (ret, tmp);
return FALSE;
}
+static gboolean
+gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps)
+{
+ GstV4l2IOMode mode;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "initializing the capture system");
+
+ GST_V4L2_CHECK_OPEN (v4l2object);
+ GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
+
+ /* find transport */
+ mode = v4l2object->req_mode;
+
+ if (v4l2object->vcap.capabilities & V4L2_CAP_READWRITE) {
+ if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
+ mode = GST_V4L2_IO_RW;
+ } else if (v4l2object->req_mode == GST_V4L2_IO_RW)
+ goto method_not_supported;
+
+ if (v4l2object->vcap.capabilities & V4L2_CAP_STREAMING) {
+ if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
+ mode = GST_V4L2_IO_MMAP;
+ } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP)
+ goto method_not_supported;
+
+ /* if still no transport selected, error out */
+ if (mode == GST_V4L2_IO_AUTO)
+ goto no_supported_capture_method;
+
+ GST_INFO_OBJECT (v4l2object->element, "accessing buffers via mode %d", mode);
+ v4l2object->mode = mode;
+
+ /* Map the buffers */
+ GST_LOG_OBJECT (v4l2object->element, "initiating buffer pool");
+
+ if (!(v4l2object->pool = gst_v4l2_buffer_pool_new (v4l2object, caps)))
+ goto buffer_pool_new_failed;
+
+ GST_V4L2_SET_ACTIVE (v4l2object);
+
+ return TRUE;
+
+ /* ERRORS */
+buffer_pool_new_failed:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
+ (_("Could not map buffers from device '%s'"),
+ v4l2object->videodev),
+ ("Failed to create buffer pool: %s", g_strerror (errno)));
+ return FALSE;
+ }
+method_not_supported:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
+ (_("The driver of device '%s' does not support the IO method %d"),
+ v4l2object->videodev, mode), (NULL));
+ return FALSE;
+ }
+no_supported_capture_method:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
+ (_("The driver of device '%s' does not support any known IO "
+ "method."), v4l2object->videodev), (NULL));
+ return FALSE;
+ }
+}
+
+
+/* Note about fraction simplification
+ * * n1/d1 == n2/d2 is also written as n1 == ( n2 * d1 ) / d2
+ * */
+#define fractions_are_equal(n1,d1,n2,d2) ((n1) == gst_util_uint64_scale_int((n2), (d1), (d2)))
gboolean
-gst_v4l2_object_set_format (GstV4l2Object * v4l2object, guint32 pixelformat,
- guint32 width, guint32 height, gboolean interlaced)
+gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
{
gint fd = v4l2object->video_fd;
struct v4l2_format format;
+ struct v4l2_streamparm streamparm;
enum v4l2_field field;
-
- if (interlaced) {
+ guint32 pixelformat;
+ struct v4l2_fmtdesc *fmtdesc;
+ GstVideoInfo info;
+ gint width, height, fps_n, fps_d, stride;
+
+ if (!gst_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info))
+ goto invalid_caps;
+
+ pixelformat = fmtdesc->pixelformat;
+ width = GST_VIDEO_INFO_WIDTH (&info);
+ height = GST_VIDEO_INFO_HEIGHT (&info);
+ fps_n = GST_VIDEO_INFO_FPS_N (&info);
+ fps_d = GST_VIDEO_INFO_FPS_D (&info);
+ stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0);
+
+ if (info.flags & GST_VIDEO_FLAG_INTERLACED) {
GST_DEBUG_OBJECT (v4l2object->element, "interlaced video");
/* ideally we would differentiate between types of interlaced video
* but there is not sufficient information in the caps..
field = V4L2_FIELD_NONE;
}
- GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %dx%d, format "
- "%" GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat));
+ GST_DEBUG_OBJECT (v4l2object->element, "Desired format %dx%d, format "
+ "%" GST_FOURCC_FORMAT " stride: %d", width, height,
+ GST_FOURCC_ARGS (pixelformat), stride);
GST_V4L2_CHECK_OPEN (v4l2object);
GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
/* Only unconditionally accept mpegts for sources */
if ((v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) &&
(pixelformat == GST_MAKE_FOURCC ('M', 'P', 'E', 'G')))
- return TRUE;
+ goto done;
memset (&format, 0x00, sizeof (struct v4l2_format));
format.type = v4l2object->type;
if (v4l2_ioctl (fd, VIDIOC_G_FMT, &format) < 0)
goto get_fmt_failed;
- if (format.type == v4l2object->type &&
- format.fmt.pix.width == width &&
- format.fmt.pix.height == height &&
- format.fmt.pix.pixelformat == pixelformat &&
- format.fmt.pix.field == field) {
- /* Nothing to do. We want to succeed immediately
- * here because setting the same format back
- * can still fail due to EBUSY. By short-circuiting
- * here, we allow pausing and re-playing pipelines
- * with changed caps, as long as the changed caps
- * do not change the webcam's format. Otherwise,
- * any caps change would require us to go to NULL
- * state to close the device and set format.
- */
- return TRUE;
+ GST_DEBUG_OBJECT (v4l2object->element, "Got format to %dx%d, format "
+ "%" GST_FOURCC_FORMAT " bytesperline %d, colorspace %d",
+ format.fmt.pix.width, format.fmt.pix.height,
+ GST_FOURCC_ARGS (format.fmt.pix.pixelformat), format.fmt.pix.bytesperline,
+ format.fmt.pix.colorspace);
+
+ if (format.type != v4l2object->type ||
+ format.fmt.pix.width != width ||
+ format.fmt.pix.height != height ||
+ format.fmt.pix.pixelformat != pixelformat ||
+ format.fmt.pix.field != field || format.fmt.pix.bytesperline != stride) {
+ /* something different, set the format */
+ GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %dx%d, format "
+ "%" GST_FOURCC_FORMAT " bytesperline %d", width, height,
+ GST_FOURCC_ARGS (pixelformat), stride);
+
+ format.type = v4l2object->type;
+ format.fmt.pix.width = width;
+ format.fmt.pix.height = height;
+ format.fmt.pix.pixelformat = pixelformat;
+ format.fmt.pix.field = field;
+ /* try to ask our prefered stride */
+ format.fmt.pix.bytesperline = stride;
+
+ if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0)
+ goto set_fmt_failed;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "Got format to %dx%d, format "
+ "%" GST_FOURCC_FORMAT " stride %d", format.fmt.pix.width,
+ format.fmt.pix.height, GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
+ format.fmt.pix.bytesperline);
+
+ if (format.fmt.pix.width != width || format.fmt.pix.height != height)
+ goto invalid_dimensions;
+
+ if (format.fmt.pix.pixelformat != pixelformat)
+ goto invalid_pixelformat;
}
- format.type = v4l2object->type;
- format.fmt.pix.width = width;
- format.fmt.pix.height = height;
- format.fmt.pix.pixelformat = pixelformat;
- format.fmt.pix.field = field;
+ /* figure out the frame layout */
+ v4l2object->bytesperline = format.fmt.pix.bytesperline;
+ v4l2object->sizeimage = format.fmt.pix.sizeimage;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "Got sizeimage %u",
+ v4l2object->sizeimage);
+
+ /* Is there a reason we require the caller to always specify a framerate? */
+ GST_DEBUG_OBJECT (v4l2object->element, "Desired framerate: %u/%u", fps_n,
+ fps_d);
+
+ memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
+ streamparm.type = v4l2object->type;
+
+ if (v4l2_ioctl (fd, VIDIOC_G_PARM, &streamparm) < 0)
+ goto get_parm_failed;
+
+ GST_VIDEO_INFO_FPS_N (&info) =
+ streamparm.parm.capture.timeperframe.denominator;
+ GST_VIDEO_INFO_FPS_D (&info) = streamparm.parm.capture.timeperframe.numerator;
+
+ if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
+ GST_DEBUG_OBJECT (v4l2object->element, "Got framerate: %u/%u",
+ streamparm.parm.capture.timeperframe.denominator,
+ streamparm.parm.capture.timeperframe.numerator);
+
+ /* We used to skip frame rate setup if the camera was already setup
+ * with the requested frame rate. This breaks some cameras though,
+ * causing them to not output data (several models of Thinkpad cameras
+ * have this problem at least).
+ * So, don't skip. */
+ GST_LOG_OBJECT (v4l2object->element, "Setting framerate to %u/%u", fps_n,
+ fps_d);
+ /* We want to change the frame rate, so check whether we can. Some cheap USB
+ * cameras don't have the capability */
+ if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
+ GST_DEBUG_OBJECT (v4l2object->element,
+ "Not setting framerate (not supported)");
+ goto done;
+ }
- if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0) {
- goto set_fmt_failed;
+ /* Note: V4L2 wants the frame interval, we have the frame rate */
+ streamparm.parm.capture.timeperframe.numerator = fps_d;
+ streamparm.parm.capture.timeperframe.denominator = fps_n;
+
+ /* some cheap USB cam's won't accept any change */
+ if (v4l2_ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
+ goto set_parm_failed;
+
+ /* get new values */
+ fps_d = streamparm.parm.capture.timeperframe.numerator;
+ fps_n = streamparm.parm.capture.timeperframe.denominator;
+
+ GST_INFO_OBJECT (v4l2object->element, "Set framerate to %u/%u", fps_n,
+ fps_d);
+
+ GST_VIDEO_INFO_FPS_N (&info) = fps_n;
+ GST_VIDEO_INFO_FPS_D (&info) = fps_d;
}
- if (format.fmt.pix.width != width || format.fmt.pix.height != height)
- goto invalid_dimensions;
+done:
+ /* if we have a framerate pre-calculate duration */
+ if (fps_n > 0 && fps_d > 0) {
+ v4l2object->duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
+ } else {
+ v4l2object->duration = GST_CLOCK_TIME_NONE;
+ }
+ v4l2object->info = info;
+ v4l2object->fmtdesc = fmtdesc;
- if (format.fmt.pix.pixelformat != pixelformat)
- goto invalid_pixelformat;
+ /* now configure ther pools */
+ if (!gst_v4l2_object_setup_pool (v4l2object, caps))
+ goto pool_failed;
return TRUE;
/* ERRORS */
+invalid_caps:
+ {
+ GST_DEBUG_OBJECT (v4l2object->element, "can't parse caps %" GST_PTR_FORMAT,
+ caps);
+ return FALSE;
+ }
get_fmt_failed:
{
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
return FALSE;
}
+get_parm_failed:
+ {
+ /* it's possible that this call is not supported */
+ if (errno != EINVAL) {
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Could not get parameters on device '%s'"),
+ v4l2object->videodev), GST_ERROR_SYSTEM);
+ }
+ goto done;
+ }
+set_parm_failed:
+ {
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Video device did not accept new frame rate setting.")),
+ GST_ERROR_SYSTEM);
+ goto done;
+ }
+pool_failed:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Video device could not create buffer pool.")), GST_ERROR_SYSTEM);
+ return FALSE;
+ }
}
gboolean
-gst_v4l2_object_start_streaming (GstV4l2Object * v4l2object)
+gst_v4l2_object_unlock (GstV4l2Object * v4l2object)
{
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_STREAMON,
- &(v4l2object->type)) < 0) {
- GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, OPEN_READ,
- (_("Error starting streaming on device '%s'."), v4l2object->videodev),
- GST_ERROR_SYSTEM);
- return FALSE;
+ GST_LOG_OBJECT (v4l2object->element, "flush poll");
+ gst_poll_set_flushing (v4l2object->poll, TRUE);
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object)
+{
+ GST_LOG_OBJECT (v4l2object->element, "flush stop poll");
+ gst_poll_set_flushing (v4l2object->poll, FALSE);
+
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_object_stop (GstV4l2Object * v4l2object)
+{
+ GST_DEBUG_OBJECT (v4l2object->element, "stopping");
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ goto done;
+ if (!GST_V4L2_IS_ACTIVE (v4l2object))
+ goto done;
+
+ if (v4l2object->pool) {
+ GST_DEBUG_OBJECT (v4l2object->element, "deactivating pool");
+ gst_buffer_pool_set_active (GST_BUFFER_POOL_CAST (v4l2object->pool), FALSE);
+ gst_object_unref (v4l2object->pool);
+ v4l2object->pool = NULL;
}
+
+ GST_V4L2_SET_INACTIVE (v4l2object);
+
+done:
return TRUE;
}
+#if 0
+static GstFlowReturn
+gst_v4l2_object_get_mmap (GstV4l2Object * v4l2object, GstBuffer ** buf)
+{
+ GstFlowReturn res;
+#define NUM_TRIALS 50
+ GstBufferPool *pool;
+ gint32 trials = NUM_TRIALS;
+ GstBuffer *pool_buffer;
+ gboolean need_copy;
+
+ pool = v4l2object->pool;
+ if (!pool)
+ goto no_buffer_pool;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "grab frame");
+
+ for (;;) {
+ if ((res = gst_v4l2_object_poll (v4l2object)) != GST_FLOW_OK)
+ goto poll_error;
+
+ res = gst_buffer_pool_acquire_buffer (pool, &pool_buffer, NULL);
+ if (res != GST_FLOW_OK)
+ goto no_buffer;
+
+ if (v4l2object->size > 0) {
+ gsize size = gst_buffer_get_size (pool_buffer);
+
+ /* if size does not match what we expected, try again */
+ if (size != v4l2object->size) {
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, READ,
+ (_("Got unexpected frame size of %u instead of %u."),
+ size, v4l2object->size), (NULL));
+ gst_buffer_unref (pool_buffer);
+ goto no_buffer;
+ }
+ }
+ /* when we get here all is fine */
+ break;
+
+ no_buffer:
+ GST_WARNING_OBJECT (v4l2object->element, "trials=%d", trials);
+
+ /* if the sync() got interrupted, we can retry */
+ switch (errno) {
+ case EINVAL:
+ case ENOMEM:
+ /* fatal */
+ return GST_FLOW_ERROR;
+
+ case EAGAIN:
+ case EIO:
+ case EINTR:
+ default:
+ /* try again, until too many trials */
+ break;
+ }
+
+ /* check nr. of attempts to capture */
+ if (--trials == -1) {
+ goto too_many_trials;
+ }
+ }
+
+
+ /* if we are handing out the last buffer in the pool, we need to make a
+ * copy and bring the buffer back in the pool. */
+ need_copy = v4l2object->always_copy
+ || !gst_v4l2_buffer_pool_available_buffers (pool);
+
+ if (G_UNLIKELY (need_copy)) {
+ if (!v4l2object->always_copy) {
+ GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, v4l2object->element,
+ "running out of buffers, making a copy to reuse current one");
+ }
+ *buf = gst_buffer_copy (pool_buffer);
+ /* this will requeue */
+ gst_buffer_unref (pool_buffer);
+ } else {
+ *buf = pool_buffer;
+ }
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+no_buffer_pool:
+ {
+ GST_DEBUG_OBJECT (v4l2object->element, "no buffer pool");
+ return GST_FLOW_FLUSHING;
+ }
+poll_error:
+ {
+ return res;
+ }
+too_many_trials:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, FAILED,
+ (_("Failed trying to get video frames from device '%s'."),
+ v4l2object->videodev),
+ (_("Failed after %d tries. device %s. system error: %s"),
+ NUM_TRIALS, v4l2object->videodev, g_strerror (errno)));
+ return GST_FLOW_ERROR;
+ }
+}
+#endif
+
gboolean
-gst_v4l2_object_stop_streaming (GstV4l2Object * v4l2object)
+gst_v4l2_object_copy (GstV4l2Object * v4l2object, GstBuffer * dest,
+ GstBuffer * src)
{
- if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_STREAMOFF,
- &(v4l2object->type)) < 0) {
- GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, OPEN_READ,
- (_("Error stopping streaming on device '%s'."), v4l2object->videodev),
- GST_ERROR_SYSTEM);
- return FALSE;
+ if (v4l2object->info.finfo) {
+ GstVideoFrame src_frame, dest_frame;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "copy video frame");
+
+ /* we have raw video, use videoframe copy to get strides right */
+ if (!gst_video_frame_map (&src_frame, &v4l2object->info, src, GST_MAP_READ))
+ goto invalid_buffer;
+
+ if (!gst_video_frame_map (&dest_frame, &v4l2object->info, dest,
+ GST_MAP_WRITE)) {
+ gst_video_frame_unmap (&src_frame);
+ goto invalid_buffer;
+ }
+
+ gst_video_frame_copy (&dest_frame, &src_frame);
+
+ gst_video_frame_unmap (&src_frame);
+ gst_video_frame_unmap (&dest_frame);
+ } else {
+ GstMapInfo map;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "copy raw bytes");
+ gst_buffer_map (src, &map, GST_MAP_READ);
+ gst_buffer_fill (dest, 0, map.data, map.size);
+ gst_buffer_unmap (src, &map);
}
+ GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, v4l2object->element,
+ "slow copy into buffer %p", dest);
+
return TRUE;
+
+ /* ERRORS */
+invalid_buffer:
+ {
+ /* No Window available to put our image into */
+ GST_WARNING_OBJECT (v4l2object->element, "could not map image");
+ return FALSE;
+ }
}
#include <gst/gst.h>
#include <gst/base/gstpushsrc.h>
-#include <gst/controller/gstcontroller.h>
-#include <gst/interfaces/propertyprobe.h>
+#include <gst/video/video.h>
+typedef struct _GstV4l2Object GstV4l2Object;
+typedef struct _GstV4l2ObjectClassHelper GstV4l2ObjectClassHelper;
+typedef struct _GstV4l2Xv GstV4l2Xv;
+
+#include <gstv4l2bufferpool.h>
/* size of v4l2 buffer pool in streaming case */
#define GST_V4L2_MAX_BUFFERS 16
/* max frame width/height */
#define GST_V4L2_MAX_SIZE (1<<15) /* 2^15 == 32768 */
-
-
G_BEGIN_DECLS
#define GST_V4L2_OBJECT(obj) (GstV4l2Object *)(obj)
-typedef struct _GstV4l2Object GstV4l2Object;
-typedef struct _GstV4l2ObjectClassHelper GstV4l2ObjectClassHelper;
-typedef struct _GstV4l2Xv GstV4l2Xv;
+typedef enum {
+ GST_V4L2_IO_AUTO = 0,
+ GST_V4L2_IO_RW = 1,
+ GST_V4L2_IO_MMAP = 2,
+ GST_V4L2_IO_USERPTR = 3
+} GstV4l2IOMode;
typedef gboolean (*GstV4l2GetInOutFunction) (GstV4l2Object * v4l2object, gint * input);
typedef gboolean (*GstV4l2SetInOutFunction) (GstV4l2Object * v4l2object, gint input);
typedef gboolean (*GstV4l2UpdateFpsFunction) (GstV4l2Object * v4l2object);
+#define GST_V4L2_WIDTH(o) (GST_VIDEO_INFO_WIDTH (&(o)->info))
+#define GST_V4L2_HEIGHT(o) (GST_VIDEO_INFO_HEIGHT (&(o)->info))
+#define GST_V4L2_PIXELFORMAT(o) ((o)->fmtdesc->pixelformat)
+#define GST_V4L2_FPS_N(o) (GST_VIDEO_INFO_FPS_N (&(o)->info))
+#define GST_V4L2_FPS_D(o) (GST_VIDEO_INFO_FPS_D (&(o)->info))
+
+/* simple check whether the device is open */
+#define GST_V4L2_IS_OPEN(o) ((o)->video_fd > 0)
+
+/* check whether the device is 'active' */
+#define GST_V4L2_IS_ACTIVE(o) ((o)->active)
+#define GST_V4L2_SET_ACTIVE(o) ((o)->active = TRUE)
+#define GST_V4L2_SET_INACTIVE(o) ((o)->active = FALSE)
+
struct _GstV4l2Object {
GstElement * element;
+ enum v4l2_buf_type type; /* V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_OUTPUT */
+
/* the video device */
char *videodev;
/* the video-device's file descriptor */
gint video_fd;
+ GstV4l2IOMode mode;
GstPoll * poll;
gboolean can_poll_device;
- /* the video buffer (mmap()'ed) */
- guint8 **buffer;
+ gboolean active;
+ gboolean streaming;
- enum v4l2_buf_type type; /* V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_OUTPUT */
+ /* the current format */
+ struct v4l2_fmtdesc *fmtdesc;
+ GstVideoInfo info;
+
+ guint32 bytesperline;
+ guint32 sizeimage;
+ GstClockTime duration;
+
+ /* wanted mode */
+ GstV4l2IOMode req_mode;
+
+ /* optional pool */
+ gboolean always_copy;
+ GstBufferPool *pool;
/* the video device's capabilities */
struct v4l2_capability vcap;
PROP_CONTRAST, \
PROP_SATURATION, \
PROP_HUE, \
- PROP_TV_NORM
+ PROP_TV_NORM, \
+ PROP_IO_MODE
/* create/destroy */
GstV4l2Object * gst_v4l2_object_new (GstElement * element,
gboolean gst_v4l2_object_get_property_helper (GstV4l2Object *v4l2object,
guint prop_id, GValue * value,
GParamSpec * pspec);
-/* starting/stopping */
-gboolean gst_v4l2_object_start (GstV4l2Object *v4l2object);
-gboolean gst_v4l2_object_stop (GstV4l2Object *v4l2object);
+/* open/close */
+gboolean gst_v4l2_object_open (GstV4l2Object *v4l2object);
+gboolean gst_v4l2_object_close (GstV4l2Object *v4l2object);
/* probing */
+#if 0
const GList* gst_v4l2_probe_get_properties (GstPropertyProbe * probe);
void gst_v4l2_probe_probe_property (GstPropertyProbe * probe, guint prop_id,
GValueArray* gst_v4l2_probe_get_values (GstPropertyProbe * probe, guint prop_id,
const GParamSpec * pspec,
GList ** klass_devices);
+#endif
GstCaps* gst_v4l2_object_probe_caps_for_format (GstV4l2Object *v4l2object, guint32 pixelformat,
const GstStructure * template);
-gboolean gst_v4l2_object_get_caps_info (GstV4l2Object *v4l2object, GstCaps *caps,
- struct v4l2_fmtdesc **format, gint *w, gint *h,
- gboolean * interlaced, guint *fps_n, guint *fps_d, guint *size);
-
-
GSList* gst_v4l2_object_get_format_list (GstV4l2Object *v4l2object);
GstCaps* gst_v4l2_object_get_all_caps (void);
GstStructure* gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc);
-gboolean gst_v4l2_object_set_format (GstV4l2Object *v4l2object, guint32 pixelformat, guint32 width, guint32 height, gboolean interlaced);
+gboolean gst_v4l2_object_set_format (GstV4l2Object *v4l2object, GstCaps * caps);
+
+gboolean gst_v4l2_object_unlock (GstV4l2Object *v4l2object);
+gboolean gst_v4l2_object_unlock_stop (GstV4l2Object *v4l2object);
+
+gboolean gst_v4l2_object_stop (GstV4l2Object *v4l2object);
+
-gboolean gst_v4l2_object_start_streaming (GstV4l2Object *v4l2object);
-gboolean gst_v4l2_object_stop_streaming (GstV4l2Object *v4l2object);
+gboolean gst_v4l2_object_copy (GstV4l2Object * v4l2object,
+ GstBuffer * dest, GstBuffer *src);
#define GST_IMPLEMENT_V4L2_PROBE_METHODS(Type_Class, interface_as_function) \
return gst_v4l2radio_set_mute_on (radio, FALSE);
}
-GST_IMPLEMENT_V4L2_PROBE_METHODS (GstV4l2RadioClass, gst_v4l2radio);
GST_IMPLEMENT_V4L2_TUNER_METHODS (GstV4l2Radio, gst_v4l2radio);
static void gst_v4l2radio_uri_handler_init (gpointer g_iface,
gpointer iface_data);
-static gboolean
-gst_v4l2radio_interface_supported (GstImplementsInterface * iface,
- GType iface_type)
-{
- if (iface_type == GST_TYPE_TUNER)
- return TRUE;
- else
- return FALSE;
-}
-
static void
-gst_v4l2radio_implements_interface_init (GstImplementsInterfaceClass * iface)
-{
- iface->supported = gst_v4l2radio_interface_supported;
-}
-
-static void
-gst_v4l2radio_tuner_interface_reinit (GstTunerClass * iface)
+gst_v4l2radio_tuner_interface_reinit (GstTunerInterface * iface)
{
gst_v4l2radio_tuner_interface_init (iface);
}
-static void
-gst_v4l2radio_interfaces (GType type)
-{
- static const GInterfaceInfo urihandler_info = {
- (GInterfaceInitFunc) gst_v4l2radio_uri_handler_init,
- NULL,
- NULL
- };
-
- static const GInterfaceInfo implements_interface_info = {
- (GInterfaceInitFunc) gst_v4l2radio_implements_interface_init,
- NULL,
- NULL,
- };
-
- static const GInterfaceInfo propertyprobe_info = {
- (GInterfaceInitFunc) gst_v4l2radio_property_probe_interface_init,
- NULL,
- NULL,
- };
-
- static const GInterfaceInfo tuner_interface_info = {
- (GInterfaceInitFunc) gst_v4l2radio_tuner_interface_reinit,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (type, GST_TYPE_URI_HANDLER, &urihandler_info);
- g_type_add_interface_static (type,
- GST_TYPE_IMPLEMENTS_INTERFACE, &implements_interface_info);
-
- g_type_add_interface_static (type, GST_TYPE_TUNER, &tuner_interface_info);
-
- g_type_add_interface_static (type,
- GST_TYPE_PROPERTY_PROBE, &propertyprobe_info);
-}
-
-GST_BOILERPLATE_FULL (GstV4l2Radio, gst_v4l2radio, GstElement, GST_TYPE_ELEMENT,
- gst_v4l2radio_interfaces);
+#define gst_v4l2radio_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstV4l2Radio, gst_v4l2radio, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER,
+ gst_v4l2radio_uri_handler_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TUNER,
+ gst_v4l2radio_tuner_interface_reinit));
static void gst_v4l2radio_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GstStateChange transition);
static void
-gst_v4l2radio_base_init (gpointer gclass)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (gclass);
- GstV4l2RadioClass *gstv4l2radio_class = GST_V4L2RADIO_CLASS (gclass);
-
- GST_DEBUG_CATEGORY_INIT (v4l2radio_debug, "v4l2radio", 0,
- "V4l2 radio element");
-
- gstv4l2radio_class->v4l2_class_devices = NULL;
-
- gst_element_class_set_details_simple (gstelement_class,
- "Radio (video4linux2) Tuner",
- "Tuner",
- "Controls a Video4Linux2 radio device",
- "Alexey Chernov <4ernov@gmail.com>");
-}
-
-static void
gst_v4l2radio_class_init (GstV4l2RadioClass * klass)
{
GObjectClass *gobject_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
+ gobject_class->dispose = gst_v4l2radio_dispose;
+ gobject_class->finalize = (GObjectFinalizeFunc) gst_v4l2radio_finalize;
gobject_class->set_property = gst_v4l2radio_set_property;
gobject_class->get_property = gst_v4l2radio_get_property;
"Station frequency in Hz",
MIN_FREQUENCY, MAX_FREQUENCY, DEFAULT_FREQUENCY, G_PARAM_READWRITE));
- gobject_class->dispose = gst_v4l2radio_dispose;
- gobject_class->finalize = (GObjectFinalizeFunc) gst_v4l2radio_finalize;
-
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_v4l2radio_change_state);
+ gst_element_class_set_details_simple (gstelement_class,
+ "Radio (video4linux2) Tuner",
+ "Tuner",
+ "Controls a Video4Linux2 radio device",
+ "Alexey Chernov <4ernov@gmail.com>");
+
+ klass->v4l2_class_devices = NULL;
+
+ GST_DEBUG_CATEGORY_INIT (v4l2radio_debug, "v4l2radio", 0,
+ "V4l2 radio element");
}
static void
-gst_v4l2radio_init (GstV4l2Radio * filter, GstV4l2RadioClass * gclass)
+gst_v4l2radio_init (GstV4l2Radio * filter)
{
filter->v4l2object = gst_v4l2_object_new (GST_ELEMENT (filter),
V4L2_BUF_TYPE_VIDEO_CAPTURE, DEFAULT_PROP_DEVICE,
static gboolean
gst_v4l2radio_stop (GstV4l2Radio * radio)
{
- if (!gst_v4l2_object_stop (radio->v4l2object))
+ if (!gst_v4l2_object_close (radio->v4l2object))
return FALSE;
return TRUE;
/* GstURIHandler interface */
static GstURIType
-gst_v4l2radio_uri_get_type (void)
+gst_v4l2radio_uri_get_type (GType type)
{
return GST_URI_SRC;
}
-static gchar **
-gst_v4l2radio_uri_get_protocols (void)
+static const gchar *const *
+gst_v4l2radio_uri_get_protocols (GType type)
{
- static gchar *protocols[] = { (char *) "radio", NULL };
+ static const gchar *protocols[] = { "radio", NULL };
+
return protocols;
}
-static const gchar *
+static gchar *
gst_v4l2radio_uri_get_uri (GstURIHandler * handler)
{
GstV4l2Radio *radio = GST_V4L2RADIO (handler);
if (radio->v4l2object->videodev != NULL) {
if (gst_v4l2_get_frequency (radio->v4l2object,
0, &(radio->v4l2object->frequency))) {
- gchar uri[20];
- gchar freq[6];
- g_ascii_formatd (freq, 6, "%4.1f", radio->v4l2object->frequency / 1e6);
- g_snprintf (uri, sizeof (uri), "radio://%s", freq);
- return g_intern_string (uri);
+ return g_strdup_printf ("radio://%4.1f",
+ radio->v4l2object->frequency / 1e6);
}
}
- return "radio://";
+ return g_strdup ("radio://");
}
static gboolean
-gst_v4l2radio_uri_set_uri (GstURIHandler * handler, const gchar * uri)
+gst_v4l2radio_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
{
GstV4l2Radio *radio = GST_V4L2RADIO (handler);
gdouble dfreq;
return TRUE;
uri_failed:
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_REFERENCE,
+ "Bad radio URI, could not parse frequency");
return FALSE;
}
#include <config.h>
#endif
+#include "gst/video/gstvideometa.h"
#include "gstv4l2colorbalance.h"
#include "gstv4l2tuner.h"
#ifdef HAVE_XVIDEO
-#include "gstv4l2xoverlay.h"
+#include "gstv4l2videooverlay.h"
#endif
#include "gstv4l2vidorient.h"
GST_DEBUG_CATEGORY (v4l2sink_debug);
#define GST_CAT_DEFAULT v4l2sink_debug
-#define PROP_DEF_QUEUE_SIZE 12
-#define PROP_DEF_MIN_QUEUED_BUFS 1
#define DEFAULT_PROP_DEVICE "/dev/video1"
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
- PROP_QUEUE_SIZE,
- PROP_MIN_QUEUED_BUFS,
PROP_OVERLAY_TOP,
PROP_OVERLAY_LEFT,
PROP_OVERLAY_WIDTH,
};
-GST_IMPLEMENT_V4L2_PROBE_METHODS (GstV4l2SinkClass, gst_v4l2sink);
GST_IMPLEMENT_V4L2_COLOR_BALANCE_METHODS (GstV4l2Sink, gst_v4l2sink);
GST_IMPLEMENT_V4L2_TUNER_METHODS (GstV4l2Sink, gst_v4l2sink);
#ifdef HAVE_XVIDEO
-GST_IMPLEMENT_V4L2_XOVERLAY_METHODS (GstV4l2Sink, gst_v4l2sink);
+GST_IMPLEMENT_V4L2_VIDEO_OVERLAY_METHODS (GstV4l2Sink, gst_v4l2sink);
#endif
GST_IMPLEMENT_V4L2_VIDORIENT_METHODS (GstV4l2Sink, gst_v4l2sink);
-static gboolean
-gst_v4l2sink_iface_supported (GstImplementsInterface * iface, GType iface_type)
-{
- GstV4l2Object *v4l2object = GST_V4L2SINK (iface)->v4l2object;
-
-#ifdef HAVE_XVIDEO
- g_assert (iface_type == GST_TYPE_X_OVERLAY ||
- iface_type == GST_TYPE_NAVIGATION ||
- iface_type == GST_TYPE_COLOR_BALANCE ||
- iface_type == GST_TYPE_VIDEO_ORIENTATION ||
- iface_type == GST_TYPE_TUNER);
-#else
- g_assert (iface_type == GST_TYPE_COLOR_BALANCE ||
- iface_type == GST_TYPE_VIDEO_ORIENTATION ||
- iface_type == GST_TYPE_TUNER);
-#endif
-
- if (v4l2object->video_fd == -1)
- return FALSE;
-
-#ifdef HAVE_XVIDEO
- if (!GST_V4L2_IS_OVERLAY (v4l2object)) {
- if (iface_type == GST_TYPE_X_OVERLAY || iface_type == GST_TYPE_NAVIGATION)
- return FALSE;
- }
-#endif
-
- return TRUE;
-}
-
-static void
-gst_v4l2sink_interface_init (GstImplementsInterfaceClass * klass)
-{
- /*
- * default virtual functions
- */
- klass->supported = gst_v4l2sink_iface_supported;
-}
-
#ifdef HAVE_XVIDEO
static void gst_v4l2sink_navigation_send_event (GstNavigation * navigation,
GstStructure * structure);
}
#endif
-static void
-gst_v4l2sink_init_interfaces (GType type)
-{
- static const GInterfaceInfo v4l2iface_info = {
- (GInterfaceInitFunc) gst_v4l2sink_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo v4l2_tuner_info = {
- (GInterfaceInitFunc) gst_v4l2sink_tuner_interface_init,
- NULL,
- NULL,
- };
+#define gst_v4l2sink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstV4l2Sink, gst_v4l2sink, GST_TYPE_VIDEO_SINK,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TUNER, gst_v4l2sink_tuner_interface_init);
#ifdef HAVE_XVIDEO
- static const GInterfaceInfo v4l2_xoverlay_info = {
- (GInterfaceInitFunc) gst_v4l2sink_xoverlay_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo v4l2_navigation_info = {
- (GInterfaceInitFunc) gst_v4l2sink_navigation_init,
- NULL,
- NULL,
- };
+ G_IMPLEMENT_INTERFACE (GST_TYPE_VIDEO_OVERLAY,
+ gst_v4l2sink_video_overlay_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_NAVIGATION, gst_v4l2sink_navigation_init);
#endif
- static const GInterfaceInfo v4l2_colorbalance_info = {
- (GInterfaceInitFunc) gst_v4l2sink_color_balance_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo v4l2_videoorientation_info = {
- (GInterfaceInitFunc) gst_v4l2sink_video_orientation_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo v4l2_propertyprobe_info = {
- (GInterfaceInitFunc) gst_v4l2sink_property_probe_interface_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (type,
- GST_TYPE_IMPLEMENTS_INTERFACE, &v4l2iface_info);
- g_type_add_interface_static (type, GST_TYPE_TUNER, &v4l2_tuner_info);
-#ifdef HAVE_XVIDEO
- g_type_add_interface_static (type, GST_TYPE_X_OVERLAY, &v4l2_xoverlay_info);
- g_type_add_interface_static (type,
- GST_TYPE_NAVIGATION, &v4l2_navigation_info);
-#endif
- g_type_add_interface_static (type,
- GST_TYPE_COLOR_BALANCE, &v4l2_colorbalance_info);
- g_type_add_interface_static (type,
- GST_TYPE_VIDEO_ORIENTATION, &v4l2_videoorientation_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &v4l2_propertyprobe_info);
-}
-
-
-GST_BOILERPLATE_FULL (GstV4l2Sink, gst_v4l2sink, GstVideoSink,
- GST_TYPE_VIDEO_SINK, gst_v4l2sink_init_interfaces);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
+ gst_v4l2sink_color_balance_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_VIDEO_ORIENTATION,
+ gst_v4l2sink_video_orientation_interface_init));
static void gst_v4l2sink_dispose (GObject * object);
static void gst_v4l2sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-
/* GstElement methods: */
static GstStateChangeReturn gst_v4l2sink_change_state (GstElement * element,
GstStateChange transition);
/* GstBaseSink methods: */
-static GstCaps *gst_v4l2sink_get_caps (GstBaseSink * bsink);
+static gboolean gst_v4l2sink_propose_allocation (GstBaseSink * bsink,
+ GstQuery * query);
+static GstCaps *gst_v4l2sink_get_caps (GstBaseSink * bsink, GstCaps * filter);
static gboolean gst_v4l2sink_set_caps (GstBaseSink * bsink, GstCaps * caps);
+#if 0
static GstFlowReturn gst_v4l2sink_buffer_alloc (GstBaseSink * bsink,
guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf);
+#endif
static GstFlowReturn gst_v4l2sink_show_frame (GstBaseSink * bsink,
GstBuffer * buf);
static void
-gst_v4l2sink_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
- GstV4l2SinkClass *gstv4l2sink_class = GST_V4L2SINK_CLASS (g_class);
- GstPadTemplate *pad_template;
-
- gstv4l2sink_class->v4l2_class_devices = NULL;
-
- GST_DEBUG_CATEGORY_INIT (v4l2sink_debug, "v4l2sink", 0, "V4L2 sink element");
-
- gst_element_class_set_details_simple (gstelement_class,
- "Video (video4linux2) Sink", "Sink/Video",
- "Displays frames on a video4linux2 device", "Rob Clark <rob@ti.com>,");
-
- pad_template =
- gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
- gst_v4l2_object_get_all_caps ());
- gst_element_class_add_pad_template (gstelement_class, pad_template);
- gst_object_unref (pad_template);
-}
-
-static void
gst_v4l2sink_class_init (GstV4l2SinkClass * klass)
{
GObjectClass *gobject_class;
gst_v4l2_object_install_properties_helper (gobject_class,
DEFAULT_PROP_DEVICE);
- g_object_class_install_property (gobject_class, PROP_QUEUE_SIZE,
- g_param_spec_uint ("queue-size", "Queue size",
- "Number of buffers to be enqueud in the driver in streaming mode",
- GST_V4L2_MIN_BUFFERS, GST_V4L2_MAX_BUFFERS, PROP_DEF_QUEUE_SIZE,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_MIN_QUEUED_BUFS,
- g_param_spec_uint ("min-queued-bufs", "Minimum queued bufs",
- "Minimum number of queued bufs; v4l2sink won't dqbuf if the driver "
- "doesn't have more than this number (which normally you shouldn't change)",
- 0, GST_V4L2_MAX_BUFFERS, PROP_DEF_MIN_QUEUED_BUFS,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
g_object_class_install_property (gobject_class, PROP_OVERLAY_TOP,
g_param_spec_int ("overlay-top", "Overlay top",
"The topmost (y) coordinate of the video overlay; top left corner of screen is 0,0",
"The height of the video crop; default is equal to negotiated image height",
0, 0xffffffff, 0, G_PARAM_READWRITE));
+ gst_element_class_set_details_simple (element_class,
+ "Video (video4linux2) Sink", "Sink/Video",
+ "Displays frames on a video4linux2 device", "Rob Clark <rob@ti.com>,");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+ gst_v4l2_object_get_all_caps ()));
+
basesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_v4l2sink_get_caps);
basesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_v4l2sink_set_caps);
- basesink_class->buffer_alloc = GST_DEBUG_FUNCPTR (gst_v4l2sink_buffer_alloc);
+ basesink_class->propose_allocation =
+ GST_DEBUG_FUNCPTR (gst_v4l2sink_propose_allocation);
basesink_class->render = GST_DEBUG_FUNCPTR (gst_v4l2sink_show_frame);
+
+ klass->v4l2_class_devices = NULL;
+
+ GST_DEBUG_CATEGORY_INIT (v4l2sink_debug, "v4l2sink", 0, "V4L2 sink element");
+
}
static void
-gst_v4l2sink_init (GstV4l2Sink * v4l2sink, GstV4l2SinkClass * klass)
+gst_v4l2sink_init (GstV4l2Sink * v4l2sink)
{
v4l2sink->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2sink),
V4L2_BUF_TYPE_VIDEO_OUTPUT, DEFAULT_PROP_DEVICE,
*/
g_object_set (v4l2sink, "device", "/dev/video1", NULL);
- /* number of buffers requested */
- v4l2sink->num_buffers = PROP_DEF_QUEUE_SIZE;
- v4l2sink->min_queued_bufs = PROP_DEF_MIN_QUEUED_BUFS;
-
v4l2sink->probed_caps = NULL;
- v4l2sink->current_caps = NULL;
v4l2sink->overlay_fields_set = 0;
v4l2sink->crop_fields_set = 0;
- v4l2sink->state = 0;
}
gst_caps_unref (v4l2sink->probed_caps);
}
- if (v4l2sink->current_caps) {
- gst_caps_unref (v4l2sink->current_caps);
- }
-
G_OBJECT_CLASS (parent_class)->dispose (object);
}
/*
- * State values
- */
-enum
-{
- STATE_OFF = 0,
- STATE_PENDING_STREAMON,
- STATE_STREAMING
-};
-
-/*
* flags to indicate which overlay/crop properties the user has set (and
* therefore which ones should override the defaults from the driver)
*/
if (!gst_v4l2_object_set_property_helper (v4l2sink->v4l2object,
prop_id, value, pspec)) {
switch (prop_id) {
- case PROP_QUEUE_SIZE:
- v4l2sink->num_buffers = g_value_get_uint (value);
- break;
- case PROP_MIN_QUEUED_BUFS:
- v4l2sink->min_queued_bufs = g_value_get_uint (value);
- break;
case PROP_OVERLAY_TOP:
v4l2sink->overlay.top = g_value_get_int (value);
v4l2sink->overlay_fields_set |= RECT_TOP_SET;
if (!gst_v4l2_object_get_property_helper (v4l2sink->v4l2object,
prop_id, value, pspec)) {
switch (prop_id) {
- case PROP_QUEUE_SIZE:
- g_value_set_uint (value, v4l2sink->num_buffers);
- break;
- case PROP_MIN_QUEUED_BUFS:
- g_value_set_uint (value, v4l2sink->min_queued_bufs);
- break;
case PROP_OVERLAY_TOP:
g_value_set_int (value, v4l2sink->overlay.top);
break;
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
/* open the device */
- if (!gst_v4l2_object_start (v4l2sink->v4l2object))
+ if (!gst_v4l2_object_open (v4l2sink->v4l2object))
return GST_STATE_CHANGE_FAILURE;
break;
default:
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
- if (v4l2sink->state == STATE_STREAMING) {
- if (!gst_v4l2_object_stop_streaming (v4l2sink->v4l2object)) {
- return GST_STATE_CHANGE_FAILURE;
- }
- v4l2sink->state = STATE_PENDING_STREAMON;
- }
+ if (!gst_v4l2_object_stop (v4l2sink->v4l2object))
+ return GST_STATE_CHANGE_FAILURE;
break;
case GST_STATE_CHANGE_READY_TO_NULL:
- if (NULL != v4l2sink->pool)
- gst_v4l2_buffer_pool_destroy (v4l2sink->pool);
- v4l2sink->pool = NULL;
- /* close the device */
+ /* we need to call stop here too */
if (!gst_v4l2_object_stop (v4l2sink->v4l2object))
return GST_STATE_CHANGE_FAILURE;
- v4l2sink->state = STATE_OFF;
+ /* close the device */
+ if (!gst_v4l2_object_close (v4l2sink->v4l2object))
+ return GST_STATE_CHANGE_FAILURE;
break;
default:
break;
static GstCaps *
-gst_v4l2sink_get_caps (GstBaseSink * bsink)
+gst_v4l2sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
{
GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
GstCaps *ret;
(v4l2sink)));
}
- if (v4l2sink->probed_caps) {
- LOG_CAPS (v4l2sink, v4l2sink->probed_caps);
- return gst_caps_ref (v4l2sink->probed_caps);
- }
-
- formats = gst_v4l2_object_get_format_list (v4l2sink->v4l2object);
+ if (v4l2sink->probed_caps == NULL) {
+ formats = gst_v4l2_object_get_format_list (v4l2sink->v4l2object);
- ret = gst_caps_new_empty ();
+ ret = gst_caps_new_empty ();
- for (walk = formats; walk; walk = walk->next) {
- struct v4l2_fmtdesc *format;
+ for (walk = formats; walk; walk = walk->next) {
+ struct v4l2_fmtdesc *format;
- GstStructure *template;
+ GstStructure *template;
- format = (struct v4l2_fmtdesc *) walk->data;
+ format = (struct v4l2_fmtdesc *) walk->data;
- template = gst_v4l2_object_v4l2fourcc_to_structure (format->pixelformat);
+ template = gst_v4l2_object_v4l2fourcc_to_structure (format->pixelformat);
- if (template) {
- GstCaps *tmp;
+ if (template) {
+ GstCaps *tmp;
- tmp =
- gst_v4l2_object_probe_caps_for_format (v4l2sink->v4l2object,
- format->pixelformat, template);
- if (tmp)
- gst_caps_append (ret, tmp);
+ tmp =
+ gst_v4l2_object_probe_caps_for_format (v4l2sink->v4l2object,
+ format->pixelformat, template);
+ if (tmp)
+ gst_caps_append (ret, tmp);
- gst_structure_free (template);
- } else {
- GST_DEBUG_OBJECT (v4l2sink, "unknown format %u", format->pixelformat);
+ gst_structure_free (template);
+ } else {
+ GST_DEBUG_OBJECT (v4l2sink, "unknown format %u", format->pixelformat);
+ }
}
+ v4l2sink->probed_caps = ret;
}
- v4l2sink->probed_caps = gst_caps_ref (ret);
+ if (filter) {
+ ret =
+ gst_caps_intersect_full (filter, v4l2sink->probed_caps,
+ GST_CAPS_INTERSECT_FIRST);
+ } else {
+ ret = gst_caps_ref (v4l2sink->probed_caps);
+ }
GST_INFO_OBJECT (v4l2sink, "probed caps: %p", ret);
LOG_CAPS (v4l2sink, ret);
gst_v4l2sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
- gint w = 0, h = 0;
- gboolean interlaced;
- struct v4l2_fmtdesc *format;
- guint fps_n, fps_d;
- guint size;
+ GstV4l2Object *obj = v4l2sink->v4l2object;
LOG_CAPS (v4l2sink, caps);
return FALSE;
}
- if (v4l2sink->current_caps) {
- GST_DEBUG_OBJECT (v4l2sink, "already have caps set.. are they equal?");
- LOG_CAPS (v4l2sink, v4l2sink->current_caps);
- if (gst_caps_is_equal (v4l2sink->current_caps, caps)) {
- GST_DEBUG_OBJECT (v4l2sink, "yes they are!");
- return TRUE;
- }
- GST_DEBUG_OBJECT (v4l2sink, "no they aren't!");
- }
+ if (!gst_v4l2_object_stop (obj))
+ goto stop_failed;
- if (v4l2sink->pool) {
- /* TODO: if we've already allocated buffers, we probably need to
- * do something here to free and reallocate....
- *
- * gst_v4l2_object_stop_streaming()
- * gst_v4l2_buffer_pool_destroy()
- *
- */
- GST_DEBUG_OBJECT (v4l2sink, "warning, changing caps not supported yet");
- return FALSE;
- }
+ if (!gst_v4l2_object_set_format (v4l2sink->v4l2object, caps))
+ goto invalid_format;
- /* we want our own v4l2 type of fourcc codes */
- if (!gst_v4l2_object_get_caps_info (v4l2sink->v4l2object, caps,
- &format, &w, &h, &interlaced, &fps_n, &fps_d, &size)) {
- GST_DEBUG_OBJECT (v4l2sink, "can't get capture format from caps %p", caps);
- return FALSE;
- }
+ gst_v4l2sink_sync_overlay_fields (v4l2sink);
+ gst_v4l2sink_sync_crop_fields (v4l2sink);
- if (!format) {
- GST_DEBUG_OBJECT (v4l2sink, "unrecognized caps!!");
- return FALSE;
- }
+#ifdef HAVE_XVIDEO
+ gst_v4l2_video_overlay_prepare_window_handle (v4l2sink->v4l2object, TRUE);
+#endif
- if (!gst_v4l2_object_set_format (v4l2sink->v4l2object, format->pixelformat,
- w, h, interlaced)) {
- /* error already posted */
- return FALSE;
- }
+ GST_INFO_OBJECT (v4l2sink, "outputting buffers via mmap()");
- v4l2sink->video_width = w;
- v4l2sink->video_height = h;
+ v4l2sink->video_width = GST_V4L2_WIDTH (v4l2sink->v4l2object);
+ v4l2sink->video_height = GST_V4L2_HEIGHT (v4l2sink->v4l2object);
/* TODO: videosink width/height should be scaled according to
* pixel-aspect-ratio
*/
- GST_VIDEO_SINK_WIDTH (v4l2sink) = w;
- GST_VIDEO_SINK_HEIGHT (v4l2sink) = h;
-
- v4l2sink->current_caps = gst_caps_ref (caps);
+ GST_VIDEO_SINK_WIDTH (v4l2sink) = v4l2sink->video_width;
+ GST_VIDEO_SINK_HEIGHT (v4l2sink) = v4l2sink->video_height;
return TRUE;
+
+ /* ERRORS */
+stop_failed:
+ {
+ GST_DEBUG_OBJECT (v4l2sink, "failed to stop streaming");
+ return FALSE;
+ }
+invalid_format:
+ {
+ /* error already posted */
+ GST_DEBUG_OBJECT (v4l2sink, "can't set format");
+ return FALSE;
+ }
}
-/* buffer alloc function to implement pad_alloc for upstream element */
-static GstFlowReturn
-gst_v4l2sink_buffer_alloc (GstBaseSink * bsink, guint64 offset, guint size,
- GstCaps * caps, GstBuffer ** buf)
+static gboolean
+gst_v4l2sink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
{
GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
- GstV4l2Buffer *v4l2buf;
-
- if (v4l2sink->v4l2object->vcap.capabilities & V4L2_CAP_STREAMING) {
-
- /* initialize the buffer pool if not initialized yet (first buffer): */
- if (G_UNLIKELY (!v4l2sink->pool)) {
-
- /* set_caps() might not be called yet.. so just to make sure: */
- if (!gst_v4l2sink_set_caps (bsink, caps)) {
- return GST_FLOW_ERROR;
- }
-
- GST_V4L2_CHECK_OPEN (v4l2sink->v4l2object);
+ GstV4l2Object *obj = v4l2sink->v4l2object;
+ GstBufferPool *pool;
+ guint size = 0;
+ GstCaps *caps;
+ gboolean need_pool;
- if (!(v4l2sink->pool = gst_v4l2_buffer_pool_new (GST_ELEMENT (v4l2sink),
- v4l2sink->v4l2object->video_fd,
- v4l2sink->num_buffers, caps, FALSE,
- V4L2_BUF_TYPE_VIDEO_OUTPUT))) {
- return GST_FLOW_ERROR;
- }
+ gst_query_parse_allocation (query, &caps, &need_pool);
- gst_v4l2sink_sync_overlay_fields (v4l2sink);
- gst_v4l2sink_sync_crop_fields (v4l2sink);
+ if (caps == NULL)
+ goto no_caps;
-#ifdef HAVE_XVIDEO
- gst_v4l2_xoverlay_prepare_xwindow_id (v4l2sink->v4l2object, TRUE);
-#endif
+ if ((pool = obj->pool))
+ gst_object_ref (pool);
- v4l2sink->state = STATE_PENDING_STREAMON;
+ if (pool != NULL) {
+ const GstCaps *pcaps;
+ GstStructure *config;
- GST_INFO_OBJECT (v4l2sink, "outputting buffers via mmap()");
+ /* we had a pool, check caps */
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_get (config, &pcaps, &size, NULL, NULL, NULL, NULL);
- if (v4l2sink->num_buffers != v4l2sink->pool->buffer_count) {
- v4l2sink->num_buffers = v4l2sink->pool->buffer_count;
- g_object_notify (G_OBJECT (v4l2sink), "queue-size");
- }
+ GST_DEBUG_OBJECT (v4l2sink,
+ "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
+ if (!gst_caps_is_equal (caps, pcaps)) {
+ gst_object_unref (pool);
+ goto different_caps;
}
+ }
+ /* we need at least 2 buffers to operate */
+ gst_query_set_allocation_params (query, size, 2, 0, 0, 0, pool);
- v4l2buf = gst_v4l2_buffer_pool_get (v4l2sink->pool, TRUE);
+ /* we also support various metadata */
+ gst_query_add_allocation_meta (query, GST_VIDEO_META_API);
+ gst_query_add_allocation_meta (query, GST_VIDEO_CROP_META_API);
- if (G_LIKELY (v4l2buf)) {
- GST_DEBUG_OBJECT (v4l2sink, "allocated buffer: %p", v4l2buf);
- *buf = GST_BUFFER (v4l2buf);
- return GST_FLOW_OK;
- } else {
- GST_DEBUG_OBJECT (v4l2sink, "failed to allocate buffer");
- return GST_FLOW_ERROR;
- }
+ if (pool)
+ gst_object_unref (pool);
- } else {
- GST_ERROR_OBJECT (v4l2sink, "only supporting streaming mode for now...");
- return GST_FLOW_ERROR;
+ return TRUE;
+
+ /* ERRORS */
+no_caps:
+ {
+ GST_DEBUG_OBJECT (v4l2sink, "no caps specified");
+ return FALSE;
+ }
+different_caps:
+ {
+ /* different caps, we can't use this pool */
+ GST_DEBUG_OBJECT (v4l2sink, "pool has different caps");
+ return FALSE;
}
}
static GstFlowReturn
gst_v4l2sink_show_frame (GstBaseSink * bsink, GstBuffer * buf)
{
+ GstFlowReturn ret;
GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
- GstBuffer *newbuf = NULL;
+ GstV4l2Object *obj = v4l2sink->v4l2object;
GST_DEBUG_OBJECT (v4l2sink, "render buffer: %p", buf);
- if (!GST_IS_V4L2_BUFFER (buf)) {
- GstFlowReturn ret;
-
- /* special case check for sub-buffers: In certain cases, places like
- * GstBaseTransform, which might check that the buffer is writable
- * before copying metadata, timestamp, and such, will find that the
- * buffer has more than one reference to it. In these cases, they
- * will create a sub-buffer with an offset=0 and length equal to the
- * original buffer size.
- *
- * This could happen in two scenarios: (1) a tee in the pipeline, and
- * (2) because the refcnt is incremented in gst_mini_object_free()
- * before the finalize function is called, and decremented after it
- * returns.. but returning this buffer to the buffer pool in the
- * finalize function, could wake up a thread blocked in _buffer_alloc()
- * which could run and get a buffer w/ refcnt==2 before the thread
- * originally unref'ing the buffer returns from finalize function and
- * decrements the refcnt back to 1!
- */
- if (buf->parent &&
- (GST_BUFFER_DATA (buf) == GST_BUFFER_DATA (buf->parent)) &&
- (GST_BUFFER_SIZE (buf) == GST_BUFFER_SIZE (buf->parent))) {
- GST_DEBUG_OBJECT (v4l2sink, "I have a sub-buffer!");
- return gst_v4l2sink_show_frame (bsink, buf->parent);
- }
-
- GST_DEBUG_OBJECT (v4l2sink, "slow-path.. I got a %s so I need to memcpy",
- g_type_name (G_OBJECT_TYPE (buf)));
-
- ret = gst_v4l2sink_buffer_alloc (bsink,
- GST_BUFFER_OFFSET (buf), GST_BUFFER_SIZE (buf), GST_BUFFER_CAPS (buf),
- &newbuf);
-
- if (GST_FLOW_OK != ret) {
- GST_DEBUG_OBJECT (v4l2sink,
- "dropping frame! Consider increasing 'queue-size' property!");
- return GST_FLOW_OK;
- }
-
- memcpy (GST_BUFFER_DATA (newbuf),
- GST_BUFFER_DATA (buf),
- MIN (GST_BUFFER_SIZE (newbuf), GST_BUFFER_SIZE (buf)));
+ if (G_UNLIKELY (obj->pool == NULL))
+ goto not_negotiated;
- GST_DEBUG_OBJECT (v4l2sink, "render copied buffer: %p", newbuf);
+ ret =
+ gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL_CAST (obj->pool), buf);
- buf = newbuf;
- }
-
- if (!gst_v4l2_buffer_pool_qbuf (v4l2sink->pool, GST_V4L2_BUFFER (buf))) {
- return GST_FLOW_ERROR;
- }
- if (v4l2sink->state == STATE_PENDING_STREAMON) {
- if (!gst_v4l2_object_start_streaming (v4l2sink->v4l2object)) {
- return GST_FLOW_ERROR;
- }
- v4l2sink->state = STATE_STREAMING;
- }
-
- if (!newbuf) {
- gst_buffer_ref (buf);
- }
+ return ret;
- /* if the driver has more than one buffer, ie. more than just the one we
- * just queued, then dequeue one immediately to make it available via
- * _buffer_alloc():
- */
- if (gst_v4l2_buffer_pool_available_buffers (v4l2sink->pool) >
- v4l2sink->min_queued_bufs) {
- GstV4l2Buffer *v4l2buf = gst_v4l2_buffer_pool_dqbuf (v4l2sink->pool);
-
- /* note: if we get a buf, we don't want to use it directly (because
- * someone else could still hold a ref).. but instead we release our
- * reference to it, and if no one else holds a ref it will be returned
- * to the pool of available buffers.. and if not, we keep looping.
- */
- if (v4l2buf) {
- gst_buffer_unref (GST_BUFFER (v4l2buf));
- }
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ERROR_OBJECT (bsink, "not negotiated");
+ return GST_FLOW_NOT_NEGOTIATED;
}
-
- return GST_FLOW_OK;
}
#ifdef HAVE_XVIDEO
GstVideoRectangle rect;
gdouble x, y, xscale = 1.0, yscale = 1.0;
- gst_v4l2_xoverlay_get_render_rect (v4l2sink->v4l2object, &rect);
+ gst_v4l2_video_overlay_get_render_rect (v4l2sink->v4l2object, &rect);
/* We calculate scaling using the original video frames geometry to
* include pixel aspect ratio scaling.
/*< private >*/
GstV4l2Object * v4l2object;
GstCaps *probed_caps; /* all supported caps of underlying v4l2 device */
- GstCaps *current_caps; /* the current negotiated caps */
- GstV4l2BufferPool *pool;
- guint32 num_buffers;
- guint32 min_queued_bufs;
gint video_width, video_height; /* original (unscaled) video w/h */
* setting properties:
*/
guint8 overlay_fields_set, crop_fields_set;
-
- guint8 state;
};
struct _GstV4l2SinkClass {
#include <string.h>
#include <sys/time.h>
-#include "v4l2src_calls.h"
#include <unistd.h>
+#include "gst/video/gstvideometa.h"
+#include "gst/video/gstvideopool.h"
+
+#include "gstv4l2src.h"
+
#include "gstv4l2colorbalance.h"
#include "gstv4l2tuner.h"
#ifdef HAVE_XVIDEO
GST_DEBUG_CATEGORY (v4l2src_debug);
#define GST_CAT_DEFAULT v4l2src_debug
-#define PROP_DEF_QUEUE_SIZE 2
#define PROP_DEF_ALWAYS_COPY TRUE
#define PROP_DEF_DECIMATE 1
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
- PROP_QUEUE_SIZE,
PROP_ALWAYS_COPY,
PROP_DECIMATE
};
-GST_IMPLEMENT_V4L2_PROBE_METHODS (GstV4l2SrcClass, gst_v4l2src);
GST_IMPLEMENT_V4L2_COLOR_BALANCE_METHODS (GstV4l2Src, gst_v4l2src);
GST_IMPLEMENT_V4L2_TUNER_METHODS (GstV4l2Src, gst_v4l2src);
#ifdef HAVE_XVIDEO
static void gst_v4l2src_uri_handler_init (gpointer g_iface,
gpointer iface_data);
-static gboolean
-gst_v4l2src_iface_supported (GstImplementsInterface * iface, GType iface_type)
-{
- GstV4l2Object *v4l2object = GST_V4L2SRC (iface)->v4l2object;
-
+#define gst_v4l2src_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstV4l2Src, gst_v4l2src, GST_TYPE_PUSH_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_v4l2src_uri_handler_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TUNER, gst_v4l2src_tuner_interface_init);
#ifdef HAVE_XVIDEO
- if (!(iface_type == GST_TYPE_TUNER ||
- iface_type == GST_TYPE_X_OVERLAY ||
- iface_type == GST_TYPE_COLOR_BALANCE ||
- iface_type == GST_TYPE_VIDEO_ORIENTATION))
- return FALSE;
-#else
- if (!(iface_type == GST_TYPE_TUNER ||
- iface_type == GST_TYPE_COLOR_BALANCE ||
- iface_type == GST_TYPE_VIDEO_ORIENTATION))
- return FALSE;
+ /* FIXME: does GstXOverlay for v4l2src make sense in a GStreamer context? */
+ G_IMPLEMENT_INTERFACE (GST_TYPE_X_OVERLAY,
+ gst_v4l2src_xoverlay_interface_init);
#endif
-
- if (v4l2object->video_fd == -1)
- return FALSE;
-
-#ifdef HAVE_XVIDEO
- if (iface_type == GST_TYPE_X_OVERLAY && !GST_V4L2_IS_OVERLAY (v4l2object))
- return FALSE;
-#endif
-
- return TRUE;
-}
-
-static void
-gst_v4l2src_interface_init (GstImplementsInterfaceClass * klass)
-{
- /*
- * default virtual functions
- */
- klass->supported = gst_v4l2src_iface_supported;
-}
-
-static void
-gst_v4l2src_init_interfaces (GType type)
-{
- static const GInterfaceInfo urihandler_info = {
- gst_v4l2src_uri_handler_init,
- NULL,
- NULL
- };
-
- static const GInterfaceInfo v4l2iface_info = {
- (GInterfaceInitFunc) gst_v4l2src_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo v4l2_tuner_info = {
- (GInterfaceInitFunc) gst_v4l2src_tuner_interface_init,
- NULL,
- NULL,
- };
-#ifdef HAVE_XVIDEO
- /* FIXME: does GstXOverlay for v4l2src make sense in a GStreamer context? */
- static const GInterfaceInfo v4l2_xoverlay_info = {
- (GInterfaceInitFunc) gst_v4l2src_xoverlay_interface_init,
- NULL,
- NULL,
- };
-#endif
- static const GInterfaceInfo v4l2_colorbalance_info = {
- (GInterfaceInitFunc) gst_v4l2src_color_balance_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo v4l2_videoorientation_info = {
- (GInterfaceInitFunc) gst_v4l2src_video_orientation_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo v4l2_propertyprobe_info = {
- (GInterfaceInitFunc) gst_v4l2src_property_probe_interface_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (type, GST_TYPE_URI_HANDLER, &urihandler_info);
- g_type_add_interface_static (type,
- GST_TYPE_IMPLEMENTS_INTERFACE, &v4l2iface_info);
- g_type_add_interface_static (type, GST_TYPE_TUNER, &v4l2_tuner_info);
-#ifdef HAVE_XVIDEO
- g_type_add_interface_static (type, GST_TYPE_X_OVERLAY, &v4l2_xoverlay_info);
-#endif
- g_type_add_interface_static (type,
- GST_TYPE_COLOR_BALANCE, &v4l2_colorbalance_info);
- g_type_add_interface_static (type,
- GST_TYPE_VIDEO_ORIENTATION, &v4l2_videoorientation_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &v4l2_propertyprobe_info);
-}
-
-GST_BOILERPLATE_FULL (GstV4l2Src, gst_v4l2src, GstPushSrc, GST_TYPE_PUSH_SRC,
- gst_v4l2src_init_interfaces);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
+ gst_v4l2src_color_balance_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_VIDEO_ORIENTATION,
+ gst_v4l2src_video_orientation_interface_init));
static void gst_v4l2src_dispose (GObject * object);
static void gst_v4l2src_finalize (GstV4l2Src * v4l2src);
static gboolean gst_v4l2src_unlock_stop (GstBaseSrc * src);
static gboolean gst_v4l2src_stop (GstBaseSrc * src);
static gboolean gst_v4l2src_set_caps (GstBaseSrc * src, GstCaps * caps);
-static GstCaps *gst_v4l2src_get_caps (GstBaseSrc * src);
+static GstCaps *gst_v4l2src_get_caps (GstBaseSrc * src, GstCaps * filter);
static gboolean gst_v4l2src_query (GstBaseSrc * bsrc, GstQuery * query);
-static GstFlowReturn gst_v4l2src_create (GstPushSrc * src, GstBuffer ** out);
+static gboolean gst_v4l2src_decide_allocation (GstBaseSrc * src,
+ GstQuery * query);
+static GstFlowReturn gst_v4l2src_fill (GstPushSrc * src, GstBuffer * out);
static void gst_v4l2src_fixate (GstBaseSrc * basesrc, GstCaps * caps);
static gboolean gst_v4l2src_negotiate (GstBaseSrc * basesrc);
static void gst_v4l2src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-/* get_frame io methods */
-static GstFlowReturn
-gst_v4l2src_get_read (GstV4l2Src * v4l2src, GstBuffer ** buf);
-static GstFlowReturn
-gst_v4l2src_get_mmap (GstV4l2Src * v4l2src, GstBuffer ** buf);
-
-static void
-gst_v4l2src_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
- GstV4l2SrcClass *gstv4l2src_class = GST_V4L2SRC_CLASS (g_class);
- GstPadTemplate *pad_template;
-
- gstv4l2src_class->v4l2_class_devices = NULL;
-
- GST_DEBUG_CATEGORY_INIT (v4l2src_debug, "v4l2src", 0, "V4L2 source element");
-
- gst_element_class_set_details_simple (gstelement_class,
- "Video (video4linux2) Source", "Source/Video",
- "Reads frames from a Video4Linux2 device",
- "Edgard Lima <edgard.lima@indt.org.br>,"
- " Stefan Kost <ensonic@users.sf.net>");
-
- pad_template =
- gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- gst_v4l2_object_get_all_caps ());
- gst_element_class_add_pad_template (gstelement_class, pad_template);
- gst_object_unref (pad_template);
-}
-
static void
gst_v4l2src_class_init (GstV4l2SrcClass * klass)
{
gst_v4l2_object_install_properties_helper (gobject_class,
DEFAULT_PROP_DEVICE);
- g_object_class_install_property (gobject_class, PROP_QUEUE_SIZE,
- g_param_spec_uint ("queue-size", "Queue size",
- "Number of buffers to be enqueud in the driver in streaming mode",
- GST_V4L2_MIN_BUFFERS, GST_V4L2_MAX_BUFFERS, PROP_DEF_QUEUE_SIZE,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_ALWAYS_COPY,
g_param_spec_boolean ("always-copy", "Always Copy",
"If the buffer will or not be used directly from mmap",
"Only use every nth frame", 1, G_MAXINT,
PROP_DEF_DECIMATE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (element_class,
+ "Video (video4linux2) Source", "Source/Video",
+ "Reads frames from a Video4Linux2 device",
+ "Edgard Lima <edgard.lima@indt.org.br>, "
+ "Stefan Kost <ensonic@users.sf.net>");
+
+ gst_element_class_add_pad_template
+ (element_class,
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ gst_v4l2_object_get_all_caps ()));
+
basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_v4l2src_get_caps);
basesrc_class->set_caps = GST_DEBUG_FUNCPTR (gst_v4l2src_set_caps);
basesrc_class->start = GST_DEBUG_FUNCPTR (gst_v4l2src_start);
basesrc_class->query = GST_DEBUG_FUNCPTR (gst_v4l2src_query);
basesrc_class->fixate = GST_DEBUG_FUNCPTR (gst_v4l2src_fixate);
basesrc_class->negotiate = GST_DEBUG_FUNCPTR (gst_v4l2src_negotiate);
+ basesrc_class->decide_allocation =
+ GST_DEBUG_FUNCPTR (gst_v4l2src_decide_allocation);
+
+ pushsrc_class->fill = GST_DEBUG_FUNCPTR (gst_v4l2src_fill);
- pushsrc_class->create = GST_DEBUG_FUNCPTR (gst_v4l2src_create);
+ klass->v4l2_class_devices = NULL;
+
+ GST_DEBUG_CATEGORY_INIT (v4l2src_debug, "v4l2src", 0, "V4L2 source element");
}
static void
-gst_v4l2src_init (GstV4l2Src * v4l2src, GstV4l2SrcClass * klass)
+gst_v4l2src_init (GstV4l2Src * v4l2src)
{
/* fixme: give an update_fps_function */
v4l2src->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2src),
V4L2_BUF_TYPE_VIDEO_CAPTURE, DEFAULT_PROP_DEVICE,
gst_v4l2_get_input, gst_v4l2_set_input, NULL);
- /* number of buffers requested */
- v4l2src->num_buffers = PROP_DEF_QUEUE_SIZE;
-
- v4l2src->always_copy = PROP_DEF_ALWAYS_COPY;
+ v4l2src->v4l2object->always_copy = PROP_DEF_ALWAYS_COPY;
v4l2src->decimate = PROP_DEF_DECIMATE;
- v4l2src->is_capturing = FALSE;
-
gst_base_src_set_format (GST_BASE_SRC (v4l2src), GST_FORMAT_TIME);
gst_base_src_set_live (GST_BASE_SRC (v4l2src), TRUE);
-
- v4l2src->fps_d = 0;
- v4l2src->fps_n = 0;
}
-
static void
gst_v4l2src_dispose (GObject * object)
{
if (!gst_v4l2_object_set_property_helper (v4l2src->v4l2object,
prop_id, value, pspec)) {
switch (prop_id) {
- case PROP_QUEUE_SIZE:
- v4l2src->num_buffers = g_value_get_uint (value);
- break;
case PROP_ALWAYS_COPY:
- v4l2src->always_copy = g_value_get_boolean (value);
+ v4l2src->v4l2object->always_copy = g_value_get_boolean (value);
break;
case PROP_DECIMATE:
v4l2src->decimate = g_value_get_int (value);
}
}
-
static void
gst_v4l2src_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
if (!gst_v4l2_object_get_property_helper (v4l2src->v4l2object,
prop_id, value, pspec)) {
switch (prop_id) {
- case PROP_QUEUE_SIZE:
- g_value_set_uint (value, v4l2src->num_buffers);
- break;
case PROP_ALWAYS_COPY:
- g_value_set_boolean (value, v4l2src->always_copy);
+ g_value_set_boolean (value, v4l2src->v4l2object->always_copy);
break;
case PROP_DECIMATE:
g_value_set_int (value, v4l2src->decimate);
}
}
-
/* this function is a bit of a last resort */
static void
gst_v4l2src_fixate (GstBaseSrc * basesrc, GstCaps * caps)
GST_DEBUG_OBJECT (basesrc, "fixating caps %" GST_PTR_FORMAT, caps);
for (i = 0; i < gst_caps_get_size (caps); ++i) {
- const GValue *v;
-
structure = gst_caps_get_structure (caps, i);
- /* FIXME such sizes? we usually fixate to something in the 320x200
- * range... */
- /* We are fixating to greater possble size (limited to GST_V4L2_MAX_SIZE)
+ /* We are fixating to a resonable 320x200 resolution
and the maximum framerate resolution for that size */
- gst_structure_fixate_field_nearest_int (structure, "width",
- GST_V4L2_MAX_SIZE);
- gst_structure_fixate_field_nearest_int (structure, "height",
- GST_V4L2_MAX_SIZE);
+ gst_structure_fixate_field_nearest_int (structure, "width", 320);
+ gst_structure_fixate_field_nearest_int (structure, "height", 200);
gst_structure_fixate_field_nearest_fraction (structure, "framerate",
G_MAXINT, 1);
-
- v = gst_structure_get_value (structure, "format");
- if (v && G_VALUE_TYPE (v) != GST_TYPE_FOURCC) {
- guint32 fourcc;
-
- g_return_if_fail (G_VALUE_TYPE (v) == GST_TYPE_LIST);
-
- fourcc = gst_value_get_fourcc (gst_value_list_get_value (v, 0));
- gst_structure_set (structure, "format", GST_TYPE_FOURCC, fourcc, NULL);
- }
+ gst_structure_fixate_field (structure, "format");
}
GST_DEBUG_OBJECT (basesrc, "fixated caps %" GST_PTR_FORMAT, caps);
+
+ GST_BASE_SRC_CLASS (parent_class)->fixate (basesrc, caps);
}
gboolean result = FALSE;
/* first see what is possible on our source pad */
- thiscaps = gst_pad_get_caps (GST_BASE_SRC_PAD (basesrc));
+ thiscaps = gst_pad_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
LOG_CAPS (basesrc, thiscaps);
goto no_nego_needed;
/* get the peer caps */
- peercaps = gst_pad_peer_get_caps (GST_BASE_SRC_PAD (basesrc));
+ peercaps = gst_pad_peer_query_caps (GST_BASE_SRC_PAD (basesrc), thiscaps);
GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
LOG_CAPS (basesrc, peercaps);
if (peercaps && !gst_caps_is_any (peercaps)) {
* resolution strictly bigger then the first peer caps */
if (gst_caps_get_size (icaps) > 1) {
GstStructure *s = gst_caps_get_structure (peercaps, 0);
-
int best = 0;
-
int twidth, theight;
-
int width = G_MAXINT, height = G_MAXINT;
if (gst_structure_get_int (s, "width", &twidth)
*/
for (i = gst_caps_get_size (icaps) - 1; i >= 0; i--) {
GstStructure *is = gst_caps_get_structure (icaps, i);
-
int w, h;
if (gst_structure_get_int (is, "width", &w)
/* now fixate */
if (!gst_caps_is_empty (caps)) {
- gst_pad_fixate_caps (GST_BASE_SRC_PAD (basesrc), caps);
+ gst_v4l2src_fixate (basesrc, caps);
GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);
LOG_CAPS (basesrc, caps);
result = TRUE;
} else if (gst_caps_is_fixed (caps)) {
/* yay, fixed caps, use those then */
- if (gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), caps))
- result = TRUE;
+ result = gst_base_src_set_caps (basesrc, caps);
}
}
gst_caps_unref (caps);
}
static GstCaps *
-gst_v4l2src_get_caps (GstBaseSrc * src)
+gst_v4l2src_get_caps (GstBaseSrc * src, GstCaps * filter)
{
- GstV4l2Src *v4l2src = GST_V4L2SRC (src);
+ GstV4l2Src *v4l2src;
+ GstV4l2Object *obj;
GstCaps *ret;
GSList *walk;
GSList *formats;
- if (!GST_V4L2_IS_OPEN (v4l2src->v4l2object)) {
+ v4l2src = GST_V4L2SRC (src);
+ obj = v4l2src->v4l2object;
+
+ if (!GST_V4L2_IS_OPEN (obj)) {
/* FIXME: copy? */
return
gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD
if (v4l2src->probed_caps)
return gst_caps_ref (v4l2src->probed_caps);
- formats = gst_v4l2_object_get_format_list (v4l2src->v4l2object);
+ formats = gst_v4l2_object_get_format_list (obj);
ret = gst_caps_new_empty ();
for (walk = formats; walk; walk = walk->next) {
struct v4l2_fmtdesc *format;
-
GstStructure *template;
format = (struct v4l2_fmtdesc *) walk->data;
GstCaps *tmp;
tmp =
- gst_v4l2_object_probe_caps_for_format (v4l2src->v4l2object,
+ gst_v4l2_object_probe_caps_for_format (obj,
format->pixelformat, template);
if (tmp)
gst_caps_append (ret, tmp);
gst_v4l2src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
GstV4l2Src *v4l2src;
- gint w = 0, h = 0;
- gboolean interlaced;
- struct v4l2_fmtdesc *format;
- guint fps_n, fps_d;
- guint size;
+ GstV4l2Object *obj;
v4l2src = GST_V4L2SRC (src);
-
- /* if we're not open, punt -- we'll get setcaps'd later via negotiate */
- if (!GST_V4L2_IS_OPEN (v4l2src->v4l2object))
- return FALSE;
+ obj = v4l2src->v4l2object;
/* make sure we stop capturing and dealloc buffers */
- if (GST_V4L2_IS_ACTIVE (v4l2src->v4l2object)) {
- /* both will throw an element-error on failure */
- if (!gst_v4l2src_capture_stop (v4l2src))
- return FALSE;
- if (!gst_v4l2src_capture_deinit (v4l2src))
- return FALSE;
- }
-
- /* we want our own v4l2 type of fourcc codes */
- if (!gst_v4l2_object_get_caps_info (v4l2src->v4l2object, caps, &format, &w,
- &h, &interlaced, &fps_n, &fps_d, &size)) {
- GST_INFO_OBJECT (v4l2src,
- "can't get capture format from caps %" GST_PTR_FORMAT, caps);
+ if (!gst_v4l2_object_stop (obj))
return FALSE;
- }
-
- GST_DEBUG_OBJECT (v4l2src, "trying to set_capture %dx%d at %d/%d fps, "
- "format %s", w, h, fps_n, fps_d, format->description);
- if (!gst_v4l2src_set_capture (v4l2src, format->pixelformat, w, h,
- interlaced, fps_n, fps_d))
+ if (!gst_v4l2_object_set_format (obj, caps))
/* error already posted */
return FALSE;
- if (!gst_v4l2src_capture_init (v4l2src, caps))
- return FALSE;
+ return TRUE;
+}
+
+static gboolean
+gst_v4l2src_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
+{
+ GstV4l2Src *src;
+ GstV4l2Object *obj;
+ GstBufferPool *pool;
+ guint size, min, max, prefix, alignment;
+
+ src = GST_V4L2SRC (bsrc);
+ obj = src->v4l2object;
+
+ gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
+ &alignment, &pool);
+
+ GST_DEBUG_OBJECT (src, "allocation: size:%u min:%u max:%u prefix:%u "
+ "align:%u pool:%" GST_PTR_FORMAT, size, min, max, prefix, alignment,
+ pool);
- if (v4l2src->use_mmap) {
- v4l2src->get_frame = gst_v4l2src_get_mmap;
+ if (min != 0) {
+ /* if there is a min-buffers suggestion, use it. We add 1 because we need 1
+ * buffer extra to capture while the other two buffers are downstream */
+ min += 1;
} else {
- v4l2src->get_frame = gst_v4l2src_get_read;
+ min = 2;
}
- if (!gst_v4l2src_capture_start (v4l2src))
- return FALSE;
+ /* select a pool */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ if (pool == NULL) {
+ /* no downstream pool, use our own then */
+ GST_DEBUG_OBJECT (src,
+ "read/write mode: no downstream pool, using our own");
+ pool = GST_BUFFER_POOL_CAST (obj->pool);
+ size = obj->sizeimage;
+ } else {
+ /* in READ/WRITE mode, prefer a downstream pool because our own pool
+ * doesn't help much, we have to write to it as well */
+ GST_DEBUG_OBJECT (src, "read/write mode: using downstream pool");
+ /* use the bigest size, when we use our own pool we can't really do any
+ * other size than what the hardware gives us but for downstream pools
+ * we can try */
+ size = MAX (size, obj->sizeimage);
+ }
+ break;
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_USERPTR:
+ /* in streaming mode, prefer our own pool */
+ pool = GST_BUFFER_POOL_CAST (obj->pool);
+ size = obj->sizeimage;
+ GST_DEBUG_OBJECT (src,
+ "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
+ break;
+ case GST_V4L2_IO_AUTO:
+ default:
+ GST_WARNING_OBJECT (src, "unhandled mode");
+ break;
+ }
+
+ if (pool) {
+ GstStructure *config;
+ const GstCaps *caps;
- /* now store the expected output size */
- v4l2src->frame_byte_size = size;
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_get (config, &caps, NULL, NULL, NULL, NULL, NULL);
+ gst_buffer_pool_config_set (config, caps, size, min, max, prefix,
+ alignment);
+
+ /* if downstream supports video metadata, add this to the pool config */
+ if (gst_query_has_allocation_meta (query, GST_VIDEO_META_API))
+ gst_buffer_pool_config_add_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_META);
+
+ gst_buffer_pool_set_config (pool, config);
+ }
+
+ gst_query_set_allocation_params (query, size, min, max, prefix,
+ alignment, pool);
return TRUE;
}
gst_v4l2src_query (GstBaseSrc * bsrc, GstQuery * query)
{
GstV4l2Src *src;
-
+ GstV4l2Object *obj;
gboolean res = FALSE;
src = GST_V4L2SRC (bsrc);
+ obj = src->v4l2object;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_LATENCY:{
GstClockTime min_latency, max_latency;
+ guint32 fps_n, fps_d;
/* device must be open */
- if (!GST_V4L2_IS_OPEN (src->v4l2object)) {
+ if (!GST_V4L2_IS_OPEN (obj)) {
GST_WARNING_OBJECT (src,
"Can't give latency since device isn't open !");
goto done;
}
+ fps_n = GST_V4L2_FPS_N (obj);
+ fps_d = GST_V4L2_FPS_D (obj);
+
/* we must have a framerate */
- if (src->fps_n <= 0 || src->fps_d <= 0) {
+ if (fps_n <= 0 || fps_d <= 0) {
GST_WARNING_OBJECT (src,
"Can't give latency since framerate isn't fixated !");
goto done;
}
/* min latency is the time to capture one frame */
- min_latency =
- gst_util_uint64_scale_int (GST_SECOND, src->fps_d, src->fps_n);
+ min_latency = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
/* max latency is total duration of the frame buffer */
- max_latency = src->num_buffers * min_latency;
+ max_latency =
+ GST_V4L2_BUFFER_POOL_CAST (obj->pool)->max_buffers * min_latency;
GST_DEBUG_OBJECT (bsrc,
"report latency min %" GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
/* activate settings for first frame */
v4l2src->ctrl_time = 0;
- gst_object_sync_values (G_OBJECT (src), v4l2src->ctrl_time);
+ gst_object_sync_values (GST_OBJECT (src), v4l2src->ctrl_time);
return TRUE;
}
gst_v4l2src_unlock (GstBaseSrc * src)
{
GstV4l2Src *v4l2src = GST_V4L2SRC (src);
-
- GST_LOG_OBJECT (src, "Flushing");
- gst_poll_set_flushing (v4l2src->v4l2object->poll, TRUE);
-
- return TRUE;
+ return gst_v4l2_object_unlock (v4l2src->v4l2object);
}
static gboolean
gst_v4l2src_unlock_stop (GstBaseSrc * src)
{
GstV4l2Src *v4l2src = GST_V4L2SRC (src);
-
- GST_LOG_OBJECT (src, "No longer flushing");
- gst_poll_set_flushing (v4l2src->v4l2object->poll, FALSE);
-
- return TRUE;
+ return gst_v4l2_object_unlock_stop (v4l2src->v4l2object);
}
static gboolean
gst_v4l2src_stop (GstBaseSrc * src)
{
GstV4l2Src *v4l2src = GST_V4L2SRC (src);
+ GstV4l2Object *obj = v4l2src->v4l2object;
- if (GST_V4L2_IS_ACTIVE (v4l2src->v4l2object)
- && !gst_v4l2src_capture_stop (v4l2src))
- return FALSE;
-
- if (v4l2src->v4l2object->buffer != NULL) {
- if (!gst_v4l2src_capture_deinit (v4l2src))
+ if (GST_V4L2_IS_ACTIVE (obj)) {
+ if (!gst_v4l2_object_stop (obj))
return FALSE;
}
-
- v4l2src->fps_d = 0;
- v4l2src->fps_n = 0;
-
return TRUE;
}
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstV4l2Src *v4l2src = GST_V4L2SRC (element);
+ GstV4l2Object *obj = v4l2src->v4l2object;
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
/* open the device */
- if (!gst_v4l2_object_start (v4l2src->v4l2object))
+ if (!gst_v4l2_object_open (obj))
return GST_STATE_CHANGE_FAILURE;
break;
default:
switch (transition) {
case GST_STATE_CHANGE_READY_TO_NULL:
/* close the device */
- if (!gst_v4l2_object_stop (v4l2src->v4l2object))
+ if (!gst_v4l2_object_close (obj))
return GST_STATE_CHANGE_FAILURE;
if (v4l2src->probed_caps) {
}
static GstFlowReturn
-gst_v4l2src_get_read (GstV4l2Src * v4l2src, GstBuffer ** buf)
+gst_v4l2src_fill (GstPushSrc * src, GstBuffer * buf)
{
- gint amount;
- gint ret;
-
- gint buffersize;
-
- buffersize = v4l2src->frame_byte_size;
- /* In case the size per frame is unknown assume it's a streaming format (e.g.
- * mpegts) and grab a reasonable default size instead */
- if (buffersize == 0)
- buffersize = GST_BASE_SRC (v4l2src)->blocksize;
-
- *buf = gst_buffer_new_and_alloc (buffersize);
-
- do {
- ret = gst_poll_wait (v4l2src->v4l2object->poll, GST_CLOCK_TIME_NONE);
- if (G_UNLIKELY (ret < 0)) {
- if (errno == EBUSY)
- goto stopped;
- if (errno == ENXIO) {
- GST_DEBUG_OBJECT (v4l2src,
- "v4l2 device doesn't support polling. Disabling");
- v4l2src->v4l2object->can_poll_device = FALSE;
- } else {
- if (errno != EAGAIN && errno != EINTR)
- goto select_error;
- }
- }
- amount =
- v4l2_read (v4l2src->v4l2object->video_fd, GST_BUFFER_DATA (*buf),
- buffersize);
- if (amount == buffersize) {
- break;
- } else if (amount == -1) {
- if (errno == EAGAIN || errno == EINTR) {
- continue;
- } else
- goto read_error;
- } else {
- /* short reads can happen if a signal interrupts the read */
- continue;
- }
- } while (TRUE);
-
- /* we set the buffer metadata in gst_v4l2src_create() */
-
- return GST_FLOW_OK;
+ GstV4l2Src *v4l2src = GST_V4L2SRC (src);
+ GstV4l2Object *obj = v4l2src->v4l2object;
+ GstFlowReturn ret;
+ GstClock *clock;
+ GstClockTime timestamp, duration;
- /* ERRORS */
-select_error:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, READ, (NULL),
- ("select error %d: %s (%d)", ret, g_strerror (errno), errno));
- return GST_FLOW_ERROR;
- }
-stopped:
- {
- GST_DEBUG ("stop called");
- return GST_FLOW_WRONG_STATE;
- }
-read_error:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, READ,
- (_("Error reading %d bytes from device '%s'."),
- buffersize, v4l2src->v4l2object->videodev), GST_ERROR_SYSTEM);
+#if 0
+ int i;
+ /* decimate, just capture and throw away frames */
+ for (i = 0; i < v4l2src->decimate - 1; i++) {
+ ret = gst_v4l2_buffer_pool_process (obj, buf);
+ if (ret != GST_FLOW_OK) {
+ return ret;
+ }
gst_buffer_unref (*buf);
- return GST_FLOW_ERROR;
}
-}
+#endif
-static GstFlowReturn
-gst_v4l2src_get_mmap (GstV4l2Src * v4l2src, GstBuffer ** buf)
-{
- GstBuffer *temp;
- GstFlowReturn ret;
- guint size;
- guint count = 0;
+ ret =
+ gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL_CAST (obj->pool), buf);
-again:
- ret = gst_v4l2src_grab_frame (v4l2src, &temp);
if (G_UNLIKELY (ret != GST_FLOW_OK))
- goto done;
+ goto error;
- if (v4l2src->frame_byte_size > 0) {
- size = GST_BUFFER_SIZE (temp);
-
- /* if size does not match what we expected, try again */
- if (size != v4l2src->frame_byte_size) {
- GST_ELEMENT_WARNING (v4l2src, RESOURCE, READ,
- (_("Got unexpected frame size of %u instead of %u."),
- size, v4l2src->frame_byte_size), (NULL));
- gst_buffer_unref (temp);
- if (count++ > 50)
- goto size_error;
-
- goto again;
- }
+ /* set buffer metadata */
+ GST_BUFFER_OFFSET (buf) = v4l2src->offset++;
+ GST_BUFFER_OFFSET_END (buf) = v4l2src->offset;
+
+ /* timestamps, LOCK to get clock and base time. */
+ /* FIXME: element clock and base_time is rarely changing */
+ GST_OBJECT_LOCK (v4l2src);
+ if ((clock = GST_ELEMENT_CLOCK (v4l2src))) {
+ /* we have a clock, get base time and ref clock */
+ timestamp = GST_ELEMENT (v4l2src)->base_time;
+ gst_object_ref (clock);
+ } else {
+ /* no clock, can't set timestamps */
+ timestamp = GST_CLOCK_TIME_NONE;
}
+ GST_OBJECT_UNLOCK (v4l2src);
- *buf = temp;
-done:
- return ret;
-
- /* ERRORS */
-size_error:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, READ,
- (_("Error reading %d bytes on device '%s'."),
- v4l2src->frame_byte_size, v4l2src->v4l2object->videodev), (NULL));
- return GST_FLOW_ERROR;
- }
-}
+ duration = obj->duration;
-static GstFlowReturn
-gst_v4l2src_create (GstPushSrc * src, GstBuffer ** buf)
-{
- GstV4l2Src *v4l2src = GST_V4L2SRC (src);
- int i;
- GstFlowReturn ret;
+ if (G_LIKELY (clock)) {
+ /* the time now is the time of the clock minus the base time */
+ timestamp = gst_clock_get_time (clock) - timestamp;
+ gst_object_unref (clock);
- for (i = 0; i < v4l2src->decimate - 1; i++) {
- ret = v4l2src->get_frame (v4l2src, buf);
- if (ret != GST_FLOW_OK) {
- return ret;
+ /* if we have a framerate adjust timestamp for frame latency */
+ if (GST_CLOCK_TIME_IS_VALID (duration)) {
+ if (timestamp > duration)
+ timestamp -= duration;
+ else
+ timestamp = 0;
}
- gst_buffer_unref (*buf);
}
- ret = v4l2src->get_frame (v4l2src, buf);
+ /* activate settings for next frame */
+ if (GST_CLOCK_TIME_IS_VALID (duration)) {
+ v4l2src->ctrl_time += duration;
+ } else {
+ /* this is not very good (as it should be the next timestamp),
+ * still good enough for linear fades (as long as it is not -1)
+ */
+ v4l2src->ctrl_time = timestamp;
+ }
+ gst_object_sync_values (GST_OBJECT (src), v4l2src->ctrl_time);
+ GST_INFO_OBJECT (src, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (v4l2src->ctrl_time));
- /* set buffer metadata */
- if (G_LIKELY (ret == GST_FLOW_OK && *buf)) {
- GstClock *clock;
- GstClockTime timestamp;
-
- GST_BUFFER_OFFSET (*buf) = v4l2src->offset++;
- GST_BUFFER_OFFSET_END (*buf) = v4l2src->offset;
-
- /* timestamps, LOCK to get clock and base time. */
- /* FIXME: element clock and base_time is rarely changing */
- GST_OBJECT_LOCK (v4l2src);
- if ((clock = GST_ELEMENT_CLOCK (v4l2src))) {
- /* we have a clock, get base time and ref clock */
- timestamp = GST_ELEMENT (v4l2src)->base_time;
- gst_object_ref (clock);
- } else {
- /* no clock, can't set timestamps */
- timestamp = GST_CLOCK_TIME_NONE;
- }
- GST_OBJECT_UNLOCK (v4l2src);
-
- if (G_LIKELY (clock)) {
- /* the time now is the time of the clock minus the base time */
- timestamp = gst_clock_get_time (clock) - timestamp;
- gst_object_unref (clock);
-
- /* if we have a framerate adjust timestamp for frame latency */
- if (GST_CLOCK_TIME_IS_VALID (v4l2src->duration)) {
- if (timestamp > v4l2src->duration)
- timestamp -= v4l2src->duration;
- else
- timestamp = 0;
- }
- }
+ /* FIXME: use the timestamp from the buffer itself! */
+ GST_BUFFER_TIMESTAMP (buf) = timestamp;
+ GST_BUFFER_DURATION (buf) = duration;
- /* activate settings for next frame */
- if (GST_CLOCK_TIME_IS_VALID (v4l2src->duration)) {
- v4l2src->ctrl_time += v4l2src->duration;
- } else {
- /* this is not very good (as it should be the next timestamp),
- * still good enough for linear fades (as long as it is not -1)
- */
- v4l2src->ctrl_time = timestamp;
- }
- gst_object_sync_values (G_OBJECT (src), v4l2src->ctrl_time);
- GST_INFO_OBJECT (src, "sync to %" GST_TIME_FORMAT,
- GST_TIME_ARGS (v4l2src->ctrl_time));
+ return ret;
- /* FIXME: use the timestamp from the buffer itself! */
- GST_BUFFER_TIMESTAMP (*buf) = timestamp;
- GST_BUFFER_DURATION (*buf) = v4l2src->duration;
+ /* ERROR */
+error:
+ {
+ GST_ERROR_OBJECT (src, "error processing buffer");
+ return ret;
}
- return ret;
}
/* GstURIHandler interface */
static GstURIType
-gst_v4l2src_uri_get_type (void)
+gst_v4l2src_uri_get_type (GType type)
{
return GST_URI_SRC;
}
-static gchar **
-gst_v4l2src_uri_get_protocols (void)
+static const gchar *const *
+gst_v4l2src_uri_get_protocols (GType type)
{
- static gchar *protocols[] = { (char *) "v4l2", NULL };
+ static const gchar *protocols[] = { "v4l2", NULL };
return protocols;
}
-static const gchar *
+static gchar *
gst_v4l2src_uri_get_uri (GstURIHandler * handler)
{
GstV4l2Src *v4l2src = GST_V4L2SRC (handler);
if (v4l2src->v4l2object->videodev != NULL) {
- gchar uri[256];
-
- /* need to return a const string, but also don't want to leak the generated
- * string, so just intern it - there's a limited number of video devices
- * after all */
- g_snprintf (uri, sizeof (uri), "v4l2://%s", v4l2src->v4l2object->videodev);
- return g_intern_string (uri);
+ return g_strdup_printf ("v4l2://%s", v4l2src->v4l2object->videodev);
}
- return "v4l2://";
+ return g_strdup ("v4l2://");
}
static gboolean
-gst_v4l2src_uri_set_uri (GstURIHandler * handler, const gchar * uri)
+gst_v4l2src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
{
GstV4l2Src *v4l2src = GST_V4L2SRC (handler);
const gchar *device = DEFAULT_PROP_DEVICE;
typedef struct _GstV4l2Src GstV4l2Src;
typedef struct _GstV4l2SrcClass GstV4l2SrcClass;
-typedef GstFlowReturn (*GstV4l2SrcGetFunc)(GstV4l2Src * v4l2src, GstBuffer ** buf);
-
/**
* GstV4l2Src:
*
/* pads */
GstCaps *probed_caps;
- /* buffer handling */
- GstV4l2BufferPool *pool;
-
- guint32 num_buffers;
- gboolean use_mmap;
- guint32 frame_byte_size;
-
- /* if the buffer will be or not used from directly mmap */
- gboolean always_copy;
-
int decimate;
- /* True if we want to stop */
- gboolean quit;
- gboolean is_capturing;
-
guint64 offset;
- gint fps_d, fps_n; /* framerate if device is open */
- GstClockTime duration; /* duration of one frame */
-
GstClockTime ctrl_time;
-
- GstV4l2SrcGetFunc get_frame;
};
struct _GstV4l2SrcClass
#include "gstv4l2tuner.h"
#include "gstv4l2object.h"
#include "v4l2_calls.h"
-#include "v4l2src_calls.h"
static void gst_v4l2_tuner_channel_class_init (GstV4l2TunerChannelClass *
klass);
} \
\
static void \
-interface_as_function ## _tuner_interface_init (GstTunerClass * klass) \
+interface_as_function ## _tuner_interface_init (GstTunerInterface * iface) \
{ \
/* default virtual functions */ \
- klass->list_channels = interface_as_function ## _tuner_list_channels; \
- klass->set_channel = interface_as_function ## _tuner_set_channel; \
- klass->get_channel = interface_as_function ## _tuner_get_channel; \
+ iface->list_channels = interface_as_function ## _tuner_list_channels; \
+ iface->set_channel = interface_as_function ## _tuner_set_channel; \
+ iface->get_channel = interface_as_function ## _tuner_get_channel; \
\
- klass->list_norms = interface_as_function ## _tuner_list_norms; \
- klass->set_norm = interface_as_function ## _tuner_set_norm_and_notify; \
- klass->get_norm = interface_as_function ## _tuner_get_norm; \
+ iface->list_norms = interface_as_function ## _tuner_list_norms; \
+ iface->set_norm = interface_as_function ## _tuner_set_norm_and_notify; \
+ iface->get_norm = interface_as_function ## _tuner_get_norm; \
\
- klass->set_frequency = interface_as_function ## _tuner_set_frequency_and_notify; \
- klass->get_frequency = interface_as_function ## _tuner_get_frequency; \
- klass->signal_strength = interface_as_function ## _tuner_signal_strength; \
+ iface->set_frequency = interface_as_function ## _tuner_set_frequency_and_notify; \
+ iface->get_frequency = interface_as_function ## _tuner_get_frequency; \
+ iface->signal_strength = interface_as_function ## _tuner_signal_strength; \
} \
#endif /* __GST_V4L2_TUNER_H__ */
/* GStreamer
- *
* Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@indt.org.br>
*
- * gstv4l2xoverlay.c: X-based overlay interface implementation for V4L2
+ * gstv4l2video_overlay.c: X-based overlay interface implementation for V4L2
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
#include <gst/interfaces/navigation.h>
-#include "gstv4l2xoverlay.h"
+#include "gstv4l2videooverlay.h"
#include "gstv4l2object.h"
#include "v4l2_calls.h"
{
Display *dpy;
gint port, idle_id, event_id;
- GMutex *mutex; /* to serialize calls to X11 */
+ GMutex mutex; /* to serialize calls to X11 */
};
GST_DEBUG_CATEGORY_STATIC (v4l2xv_debug);
#define GST_CAT_DEFAULT v4l2xv_debug
void
-gst_v4l2_xoverlay_interface_init (GstXOverlayClass * klass)
+gst_v4l2_video_overlay_interface_init (GstVideoOverlayInterface * iface)
{
GST_DEBUG_CATEGORY_INIT (v4l2xv_debug, "v4l2xv", 0,
- "V4L2 XOverlay interface debugging");
+ "V4L2 GstVideoOverlay interface debugging");
}
static void
-gst_v4l2_xoverlay_open (GstV4l2Object * v4l2object)
+gst_v4l2_video_overlay_open (GstV4l2Object * v4l2object)
{
struct stat s;
GstV4l2Xv *v4l2xv;
v4l2xv = g_new0 (GstV4l2Xv, 1);
v4l2xv->dpy = dpy;
v4l2xv->port = id;
- v4l2xv->mutex = g_mutex_new ();
+ g_mutex_init (&v4l2xv->mutex);
v4l2xv->idle_id = 0;
v4l2xv->event_id = 0;
v4l2object->xv = v4l2xv;
if (v4l2object->xwindow_id) {
- gst_v4l2_xoverlay_set_window_handle (v4l2object, v4l2object->xwindow_id);
+ gst_v4l2_video_overlay_set_window_handle (v4l2object,
+ v4l2object->xwindow_id);
}
}
static void
-gst_v4l2_xoverlay_close (GstV4l2Object * v4l2object)
+gst_v4l2_video_overlay_close (GstV4l2Object * v4l2object)
{
GstV4l2Xv *v4l2xv = v4l2object->xv;
return;
if (v4l2object->xwindow_id) {
- gst_v4l2_xoverlay_set_window_handle (v4l2object, 0);
+ gst_v4l2_video_overlay_set_window_handle (v4l2object, 0);
}
XCloseDisplay (v4l2xv->dpy);
- g_mutex_free (v4l2xv->mutex);
+ g_mutex_clear (&v4l2xv->mutex);
if (v4l2xv->idle_id)
g_source_remove (v4l2xv->idle_id);
if (v4l2xv->event_id)
}
void
-gst_v4l2_xoverlay_start (GstV4l2Object * v4l2object)
+gst_v4l2_video_overlay_start (GstV4l2Object * v4l2object)
{
if (v4l2object->xwindow_id) {
- gst_v4l2_xoverlay_open (v4l2object);
+ gst_v4l2_video_overlay_open (v4l2object);
}
}
void
-gst_v4l2_xoverlay_stop (GstV4l2Object * v4l2object)
+gst_v4l2_video_overlay_stop (GstV4l2Object * v4l2object)
{
- gst_v4l2_xoverlay_close (v4l2object);
+ gst_v4l2_video_overlay_close (v4l2object);
}
/* should be called with mutex held */
}
gboolean
-gst_v4l2_xoverlay_get_render_rect (GstV4l2Object * v4l2object,
+gst_v4l2_video_overlay_get_render_rect (GstV4l2Object * v4l2object,
GstVideoRectangle * rect)
{
GstV4l2Xv *v4l2xv = v4l2object->xv;
gboolean ret = FALSE;
if (v4l2xv) {
- g_mutex_lock (v4l2xv->mutex);
+ g_mutex_lock (&v4l2xv->mutex);
ret = get_render_rect (v4l2object, rect);
- g_mutex_unlock (v4l2xv->mutex);
+ g_mutex_unlock (&v4l2xv->mutex);
}
return ret;
}
GST_LOG_OBJECT (v4l2object->element, "idle refresh");
if (v4l2xv) {
- g_mutex_lock (v4l2xv->mutex);
+ g_mutex_lock (&v4l2xv->mutex);
update_geometry (v4l2object);
v4l2xv->idle_id = 0;
- g_mutex_unlock (v4l2xv->mutex);
+ g_mutex_unlock (&v4l2xv->mutex);
}
/* once */
if (v4l2xv) {
XEvent e;
- g_mutex_lock (v4l2xv->mutex);
+ g_mutex_lock (&v4l2xv->mutex);
/* If the element supports navigation, collect the relavent input
* events and push them upstream as navigation events
if (pointer_moved) {
GST_DEBUG_OBJECT (v4l2object->element,
"pointer moved over window at %d,%d", pointer_x, pointer_y);
- g_mutex_unlock (v4l2xv->mutex);
+ g_mutex_unlock (&v4l2xv->mutex);
gst_navigation_send_mouse_event (GST_NAVIGATION (v4l2object->element),
"mouse-move", 0, e.xbutton.x, e.xbutton.y);
- g_mutex_lock (v4l2xv->mutex);
+ g_mutex_lock (&v4l2xv->mutex);
}
/* We get all events on our window to throw them upstream
KeySym keysym;
const char *key_str = NULL;
- g_mutex_unlock (v4l2xv->mutex);
+ g_mutex_unlock (&v4l2xv->mutex);
switch (e.type) {
case ButtonPress:
break;
case KeyPress:
case KeyRelease:
- g_mutex_lock (v4l2xv->mutex);
+ g_mutex_lock (&v4l2xv->mutex);
keysym = XKeycodeToKeysym (v4l2xv->dpy, e.xkey.keycode, 0);
if (keysym != NoSymbol) {
key_str = XKeysymToString (keysym);
} else {
key_str = "unknown";
}
- g_mutex_unlock (v4l2xv->mutex);
+ g_mutex_unlock (&v4l2xv->mutex);
GST_DEBUG_OBJECT (v4l2object->element,
"key %d pressed over window at %d,%d (%s)",
e.xkey.keycode, e.xkey.x, e.xkey.y, key_str);
"unhandled X event (%d)", e.type);
}
- g_mutex_lock (v4l2xv->mutex);
+ g_mutex_lock (&v4l2xv->mutex);
}
}
break;
}
}
- g_mutex_unlock (v4l2xv->mutex);
+ g_mutex_unlock (&v4l2xv->mutex);
}
/* repeat */
}
void
-gst_v4l2_xoverlay_set_window_handle (GstV4l2Object * v4l2object, guintptr id)
+gst_v4l2_video_overlay_set_window_handle (GstV4l2Object * v4l2object,
+ guintptr id)
{
GstV4l2Xv *v4l2xv;
XID xwindow_id = id;
(gulong) xwindow_id);
if (!v4l2object->xv && GST_V4L2_IS_OPEN (v4l2object))
- gst_v4l2_xoverlay_open (v4l2object);
+ gst_v4l2_video_overlay_open (v4l2object);
v4l2xv = v4l2object->xv;
if (v4l2xv)
- g_mutex_lock (v4l2xv->mutex);
+ g_mutex_lock (&v4l2xv->mutex);
if (change) {
if (v4l2object->xwindow_id && v4l2xv) {
if (!v4l2xv || xwindow_id == 0) {
if (v4l2xv)
- g_mutex_unlock (v4l2xv->mutex);
+ g_mutex_unlock (&v4l2xv->mutex);
return;
}
if (v4l2xv->idle_id)
g_source_remove (v4l2xv->idle_id);
v4l2xv->idle_id = g_idle_add (idle_refresh, v4l2object);
- g_mutex_unlock (v4l2xv->mutex);
+ g_mutex_unlock (&v4l2xv->mutex);
}
/**
- * gst_v4l2_xoverlay_prepare_xwindow_id:
+ * gst_v4l2_video_overlay_prepare_window_handle:
* @v4l2object: the v4l2object
* @required: %TRUE if display is required (ie. TRUE for v4l2sink, but
* FALSE for any other element with optional overlay capabilities)
* Helper function to create a windo if none is set from the application.
*/
void
-gst_v4l2_xoverlay_prepare_xwindow_id (GstV4l2Object * v4l2object,
+gst_v4l2_video_overlay_prepare_window_handle (GstV4l2Object * v4l2object,
gboolean required)
{
+ GstVideoOverlay *overlay;
+
if (!GST_V4L2_IS_OVERLAY (v4l2object))
return;
- gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (v4l2object->element));
+ overlay = GST_VIDEO_OVERLAY (v4l2object->element);
+ gst_video_overlay_prepare_window_handle (overlay);
if (required && !v4l2object->xwindow_id) {
GstV4l2Xv *v4l2xv;
long event_mask;
if (!v4l2object->xv && GST_V4L2_IS_OPEN (v4l2object))
- gst_v4l2_xoverlay_open (v4l2object);
+ gst_v4l2_video_overlay_open (v4l2object);
v4l2xv = v4l2object->xv;
- /* if xoverlay is not supported, just bail */
+ /* if video_overlay is not supported, just bail */
if (!v4l2xv)
return;
- /* xoverlay is supported, but we don't have a window.. so create one */
+ /* video_overlay is supported, but we don't have a window.. so create one */
GST_DEBUG_OBJECT (v4l2object->element, "creating window");
- g_mutex_lock (v4l2xv->mutex);
+ g_mutex_lock (&v4l2xv->mutex);
width = XDisplayWidth (v4l2xv->dpy, DefaultScreen (v4l2xv->dpy));
height = XDisplayHeight (v4l2xv->dpy, DefaultScreen (v4l2xv->dpy));
XSync (v4l2xv->dpy, FALSE);
- g_mutex_unlock (v4l2xv->mutex);
+ g_mutex_unlock (&v4l2xv->mutex);
GST_DEBUG_OBJECT (v4l2object->element, "got window");
- gst_v4l2_xoverlay_set_window_handle (v4l2object, win);
+ gst_v4l2_video_overlay_set_window_handle (v4l2object, win);
}
}
/* GStreamer
- *
* Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@indt.org.br>
*
- * gstv4l2xoverlay.h: tv mixer interface implementation for V4L2
+ * gstv4l2videooverlay.h: tv mixer interface implementation for V4L2
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* Boston, MA 02111-1307, USA.
*/
-#ifndef __GST_V4L2_X_OVERLAY_H__
-#define __GST_V4L2_X_OVERLAY_H__
+#ifndef __GST_V4L2_VIDEO_OVERLAY_H__
+#define __GST_V4L2_VIDEO_OVERLAY_H__
#include <X11/X.h>
#include <gst/gst.h>
-#include <gst/interfaces/xoverlay.h>
#include <gst/interfaces/navigation.h>
#include <gst/video/gstvideosink.h> /* for GstVideoRectange */
+#include <gst/video/videooverlay.h>
#include "gstv4l2object.h"
G_BEGIN_DECLS
-void gst_v4l2_xoverlay_start (GstV4l2Object *v4l2object);
-void gst_v4l2_xoverlay_stop (GstV4l2Object *v4l2object);
-gboolean gst_v4l2_xoverlay_get_render_rect (GstV4l2Object *v4l2object,
+void gst_v4l2_video_overlay_start (GstV4l2Object *v4l2object);
+void gst_v4l2_video_overlay_stop (GstV4l2Object *v4l2object);
+gboolean gst_v4l2_video_overlay_get_render_rect (GstV4l2Object *v4l2object,
GstVideoRectangle *rect);
-void gst_v4l2_xoverlay_interface_init (GstXOverlayClass * klass);
-void gst_v4l2_xoverlay_set_window_handle (GstV4l2Object * v4l2object,
+void gst_v4l2_video_overlay_interface_init (GstVideoOverlayInterface * iface);
+void gst_v4l2_video_overlay_set_window_handle (GstV4l2Object * v4l2object,
guintptr id);
-void gst_v4l2_xoverlay_prepare_xwindow_id (GstV4l2Object * v4l2object,
+void gst_v4l2_video_overlay_prepare_window_handle (GstV4l2Object * v4l2object,
gboolean required);
-#define GST_IMPLEMENT_V4L2_XOVERLAY_METHODS(Type, interface_as_function) \
+#define GST_IMPLEMENT_V4L2_VIDEO_OVERLAY_METHODS(Type, interface_as_function) \
\
static void \
-interface_as_function ## _xoverlay_set_window_handle (GstXOverlay * xoverlay, \
- guintptr id) \
+interface_as_function ## _video_overlay_set_window_handle (GstVideoOverlay * overlay, \
+ guintptr id) \
{ \
- Type *this = (Type*) xoverlay; \
- gst_v4l2_xoverlay_set_window_handle (this->v4l2object, id); \
+ Type *this = (Type*) overlay; \
+ gst_v4l2_video_overlay_set_window_handle (this->v4l2object, id); \
} \
\
static void \
-interface_as_function ## _xoverlay_interface_init (GstXOverlayClass * klass) \
+interface_as_function ## _video_overlay_interface_init (GstVideoOverlayInterface * iface) \
{ \
/* default virtual functions */ \
- klass->set_window_handle = interface_as_function ## _xoverlay_set_window_handle; \
+ iface->set_window_handle = interface_as_function ## _video_overlay_set_window_handle; \
\
- gst_v4l2_xoverlay_interface_init(klass); \
+ gst_v4l2_video_overlay_interface_init (iface); \
} \
-#endif /* __GST_V4L2_X_OVERLAY_H__ */
+#endif /* __GST_V4L2_VIDEO_OVERLAY_H__ */
#include "gstv4l2vidorient.h"
#include "gstv4l2object.h"
#include "v4l2_calls.h"
-#include "v4l2src_calls.h"
GST_DEBUG_CATEGORY_STATIC (v4l2vo_debug);
#define GST_CAT_DEFAULT v4l2vo_debug
#endif
void
-gst_v4l2_video_orientation_interface_init (GstVideoOrientationInterface * klass)
+gst_v4l2_video_orientation_interface_init (GstVideoOrientationInterface * iface)
{
GST_DEBUG_CATEGORY_INIT (v4l2vo_debug, "v4l2vo", 0,
"V4L2 VideoOrientation interface debugging");
#define __GST_V4L2_VIDORIENT_H__
#include <gst/gst.h>
-#include <gst/interfaces/videoorientation.h>
+#include <gst/video/videoorientation.h>
#include "gstv4l2object.h"
G_BEGIN_DECLS
-void gst_v4l2_video_orientation_interface_init (GstVideoOrientationInterface * klass);
+void gst_v4l2_video_orientation_interface_init (GstVideoOrientationInterface * iface);
gboolean gst_v4l2_video_orientation_get_hflip (GstV4l2Object *v4l2object, gboolean *flip);
gboolean gst_v4l2_video_orientation_get_vflip (GstV4l2Object *v4l2object, gboolean *flip);
} \
\
static void \
- interface_as_function ## _video_orientation_interface_init (GstVideoOrientationInterface * klass) \
+ interface_as_function ## _video_orientation_interface_init (GstVideoOrientationInterface * iface) \
{ \
/* default virtual functions */ \
- klass->get_hflip = interface_as_function ## _video_orientation_get_hflip; \
- klass->get_vflip = interface_as_function ## _video_orientation_get_vflip; \
- klass->get_hcenter = interface_as_function ## _video_orientation_get_hcenter; \
- klass->get_vcenter = interface_as_function ## _video_orientation_get_vcenter; \
- klass->set_hflip = interface_as_function ## _video_orientation_set_hflip; \
- klass->set_vflip = interface_as_function ## _video_orientation_set_vflip; \
- klass->set_hcenter = interface_as_function ## _video_orientation_set_hcenter; \
- klass->set_vcenter = interface_as_function ## _video_orientation_set_vcenter; \
+ iface->get_hflip = interface_as_function ## _video_orientation_get_hflip; \
+ iface->get_vflip = interface_as_function ## _video_orientation_get_vflip; \
+ iface->get_hcenter = interface_as_function ## _video_orientation_get_hcenter; \
+ iface->get_vcenter = interface_as_function ## _video_orientation_get_vcenter; \
+ iface->set_hflip = interface_as_function ## _video_orientation_set_hflip; \
+ iface->set_vflip = interface_as_function ## _video_orientation_set_vflip; \
+ iface->set_hcenter = interface_as_function ## _video_orientation_set_hcenter; \
+ iface->set_vcenter = interface_as_function ## _video_orientation_set_vcenter; \
}
#endif /* __GST_V4L2_VIDORIENT_H__ */
#include "gstv4l2colorbalance.h"
#include "gstv4l2src.h"
-
-#ifdef HAVE_EXPERIMENTAL
#include "gstv4l2sink.h"
-#endif
#include "gst/gst-i18n-plugin.h"
!(v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
goto not_capture;
-#ifdef HAVE_EXPERIMENTAL
if (GST_IS_V4L2SINK (v4l2object->element) &&
!(v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT))
goto not_output;
-#endif
/* create enumerations, posts errors. */
if (!gst_v4l2_fill_lists (v4l2object))
("Capabilities: 0x%x", v4l2object->vcap.capabilities));
goto error;
}
-#ifdef HAVE_EXPERIMENTAL
not_output:
{
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, NOT_FOUND,
("Capabilities: 0x%x", v4l2object->vcap.capabilities));
goto error;
}
-#endif
error:
{
if (GST_V4L2_IS_OPEN (v4l2object)) {
# define v4l2_munmap munmap
#endif
-/* simple check whether the device is open */
-#define GST_V4L2_IS_OPEN(v4l2object) \
- (v4l2object->video_fd > 0)
-
-/* check whether the device is 'active' */
-#define GST_V4L2_IS_ACTIVE(v4l2object) \
- (v4l2object->buffer != NULL)
-
#define GST_V4L2_IS_OVERLAY(v4l2object) \
(v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OVERLAY)
+++ /dev/null
-/* GStreamer
- *
- * Copyright (C) 2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
- * 2006 Edgard Lima <edgard.lima@indt.org.br>
- *
- * v4l2src.c - system calls
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <stdlib.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <sys/ioctl.h>
-#include <sys/mman.h>
-#include <string.h>
-#include <errno.h>
-#include "v4l2src_calls.h"
-#include <sys/time.h>
-#include <unistd.h>
-#ifdef __sun
-/* Needed on older Solaris Nevada builds (72 at least) */
-#include <stropts.h>
-#include <sys/ioccom.h>
-#endif
-
-#include "gstv4l2tuner.h"
-#include "gstv4l2bufferpool.h"
-
-#include "gst/gst-i18n-plugin.h"
-
-#define GST_CAT_DEFAULT v4l2src_debug
-GST_DEBUG_CATEGORY_EXTERN (GST_CAT_PERFORMANCE);
-
-/* lalala... */
-#define GST_V4L2_SET_ACTIVE(element) (element)->buffer = GINT_TO_POINTER (-1)
-#define GST_V4L2_SET_INACTIVE(element) (element)->buffer = NULL
-
-/* On some systems MAP_FAILED seems to be missing */
-#ifndef MAP_FAILED
-#define MAP_FAILED ((caddr_t) -1)
-#endif
-
-
-/* Local functions */
-
-static gboolean
-gst_v4l2src_buffer_pool_activate (GstV4l2BufferPool * pool,
- GstV4l2Src * v4l2src)
-{
- GstV4l2Buffer *buf;
-
- while ((buf = gst_v4l2_buffer_pool_get (pool, FALSE)) != NULL)
- if (!gst_v4l2_buffer_pool_qbuf (pool, buf))
- goto queue_failed;
-
- return TRUE;
-
- /* ERRORS */
-queue_failed:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, READ,
- (_("Could not enqueue buffers in device '%s'."),
- v4l2src->v4l2object->videodev),
- ("enqueing buffer %d/%d failed: %s",
- buf->vbuffer.index, v4l2src->num_buffers, g_strerror (errno)));
- return FALSE;
- }
-}
-
-/******************************************************
- * gst_v4l2src_grab_frame ():
- * grab a frame for capturing
- * return value: GST_FLOW_OK, GST_FLOW_WRONG_STATE or GST_FLOW_ERROR
- ******************************************************/
-GstFlowReturn
-gst_v4l2src_grab_frame (GstV4l2Src * v4l2src, GstBuffer ** buf)
-{
-#define NUM_TRIALS 50
- GstV4l2Object *v4l2object;
- GstV4l2BufferPool *pool;
- gint32 trials = NUM_TRIALS;
- GstBuffer *pool_buffer;
- gboolean need_copy;
- gint ret;
-
- v4l2object = v4l2src->v4l2object;
- pool = v4l2src->pool;
- if (!pool)
- goto no_buffer_pool;
-
- GST_DEBUG_OBJECT (v4l2src, "grab frame");
-
- for (;;) {
- if (v4l2object->can_poll_device) {
- ret = gst_poll_wait (v4l2object->poll, GST_CLOCK_TIME_NONE);
- if (G_UNLIKELY (ret < 0)) {
- if (errno == EBUSY)
- goto stopped;
- if (errno == ENXIO) {
- GST_DEBUG_OBJECT (v4l2src,
- "v4l2 device doesn't support polling. Disabling");
- v4l2object->can_poll_device = FALSE;
- } else {
- if (errno != EAGAIN && errno != EINTR)
- goto select_error;
- }
- }
- }
-
- pool_buffer = GST_BUFFER (gst_v4l2_buffer_pool_dqbuf (pool));
- if (pool_buffer)
- break;
-
- GST_WARNING_OBJECT (pool->v4l2elem, "trials=%d", trials);
-
- /* if the sync() got interrupted, we can retry */
- switch (errno) {
- case EINVAL:
- case ENOMEM:
- /* fatal */
- return GST_FLOW_ERROR;
-
- case EAGAIN:
- case EIO:
- case EINTR:
- default:
- /* try again, until too many trials */
- break;
- }
-
- /* check nr. of attempts to capture */
- if (--trials == -1) {
- goto too_many_trials;
- }
- }
-
- /* if we are handing out the last buffer in the pool, we need to make a
- * copy and bring the buffer back in the pool. */
- need_copy = v4l2src->always_copy
- || !gst_v4l2_buffer_pool_available_buffers (pool);
-
- if (G_UNLIKELY (need_copy)) {
- if (!v4l2src->always_copy) {
- GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, v4l2src,
- "running out of buffers, making a copy to reuse current one");
- }
- *buf = gst_buffer_copy (pool_buffer);
- GST_BUFFER_FLAG_UNSET (*buf, GST_BUFFER_FLAG_READONLY);
- /* this will requeue */
- gst_buffer_unref (pool_buffer);
- } else {
- *buf = pool_buffer;
- }
- /* we set the buffer metadata in gst_v4l2src_create() */
-
- return GST_FLOW_OK;
-
- /* ERRORS */
-no_buffer_pool:
- {
- GST_DEBUG ("no buffer pool");
- return GST_FLOW_WRONG_STATE;
- }
-select_error:
- {
- GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, READ, (NULL),
- ("select error %d: %s (%d)", ret, g_strerror (errno), errno));
- return GST_FLOW_ERROR;
- }
-stopped:
- {
- GST_DEBUG ("stop called");
- return GST_FLOW_WRONG_STATE;
- }
-too_many_trials:
- {
- GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, FAILED,
- (_("Failed trying to get video frames from device '%s'."),
- v4l2object->videodev),
- (_("Failed after %d tries. device %s. system error: %s"),
- NUM_TRIALS, v4l2object->videodev, g_strerror (errno)));
- return GST_FLOW_ERROR;
- }
-}
-
-/******************************************************
- * gst_v4l2src_set_capture():
- * set capture parameters
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-gboolean
-gst_v4l2src_set_capture (GstV4l2Src * v4l2src, guint32 pixelformat,
- guint32 width, guint32 height, gboolean interlaced,
- guint fps_n, guint fps_d)
-{
- gint fd = v4l2src->v4l2object->video_fd;
- struct v4l2_streamparm stream;
-
- if (pixelformat == GST_MAKE_FOURCC ('M', 'P', 'E', 'G'))
- return TRUE;
-
- if (!gst_v4l2_object_set_format (v4l2src->v4l2object, pixelformat, width,
- height, interlaced)) {
- /* error already reported */
- return FALSE;
- }
-
- /* Is there a reason we require the caller to always specify a framerate? */
- GST_DEBUG_OBJECT (v4l2src, "Desired framerate: %u/%u", fps_n, fps_d);
-
- memset (&stream, 0x00, sizeof (struct v4l2_streamparm));
- stream.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (v4l2_ioctl (fd, VIDIOC_G_PARM, &stream) < 0) {
- GST_ELEMENT_WARNING (v4l2src, RESOURCE, SETTINGS,
- (_("Could not get parameters on device '%s'"),
- v4l2src->v4l2object->videodev), GST_ERROR_SYSTEM);
- goto done;
- }
-
- /* We used to skip frame rate setup if the camera was already setup
- with the requested frame rate. This breaks some cameras though,
- causing them to not output data (several models of Thinkpad cameras
- have this problem at least).
- So, don't skip. */
-
- /* We want to change the frame rate, so check whether we can. Some cheap USB
- * cameras don't have the capability */
- if ((stream.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
- GST_DEBUG_OBJECT (v4l2src, "Not setting framerate (not supported)");
- goto done;
- }
-
- GST_LOG_OBJECT (v4l2src, "Setting framerate to %u/%u", fps_n, fps_d);
-
- /* Note: V4L2 wants the frame interval, we have the frame rate */
- stream.parm.capture.timeperframe.numerator = fps_d;
- stream.parm.capture.timeperframe.denominator = fps_n;
-
- /* some cheap USB cam's won't accept any change */
- if (v4l2_ioctl (fd, VIDIOC_S_PARM, &stream) < 0) {
- GST_ELEMENT_WARNING (v4l2src, RESOURCE, SETTINGS,
- (_("Video input device did not accept new frame rate setting.")),
- GST_ERROR_SYSTEM);
- goto done;
- }
-
- v4l2src->fps_n = fps_n;
- v4l2src->fps_d = fps_d;
-
- /* if we have a framerate pre-calculate duration */
- if (fps_n > 0 && fps_d > 0) {
- v4l2src->duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
- } else {
- v4l2src->duration = GST_CLOCK_TIME_NONE;
- }
-
- GST_INFO_OBJECT (v4l2src,
- "Set framerate to %u/%u and duration to %" GST_TIME_FORMAT, fps_n, fps_d,
- GST_TIME_ARGS (v4l2src->duration));
-done:
-
- return TRUE;
-}
-
-/******************************************************
- * gst_v4l2src_capture_init():
- * initialize the capture system
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-gboolean
-gst_v4l2src_capture_init (GstV4l2Src * v4l2src, GstCaps * caps)
-{
- GST_DEBUG_OBJECT (v4l2src, "initializing the capture system");
-
- GST_V4L2_CHECK_OPEN (v4l2src->v4l2object);
- GST_V4L2_CHECK_NOT_ACTIVE (v4l2src->v4l2object);
-
- if (v4l2src->v4l2object->vcap.capabilities & V4L2_CAP_STREAMING) {
-
- /* Map the buffers */
- GST_LOG_OBJECT (v4l2src, "initiating buffer pool");
-
- if (!(v4l2src->pool = gst_v4l2_buffer_pool_new (GST_ELEMENT (v4l2src),
- v4l2src->v4l2object->video_fd,
- v4l2src->num_buffers, caps, TRUE, V4L2_BUF_TYPE_VIDEO_CAPTURE)))
- goto buffer_pool_new_failed;
-
- GST_INFO_OBJECT (v4l2src, "capturing buffers via mmap()");
- v4l2src->use_mmap = TRUE;
-
- if (v4l2src->num_buffers != v4l2src->pool->buffer_count) {
- v4l2src->num_buffers = v4l2src->pool->buffer_count;
- g_object_notify (G_OBJECT (v4l2src), "queue-size");
- }
-
- } else if (v4l2src->v4l2object->vcap.capabilities & V4L2_CAP_READWRITE) {
- GST_INFO_OBJECT (v4l2src, "capturing buffers via read()");
- v4l2src->use_mmap = FALSE;
- v4l2src->pool = NULL;
- } else {
- goto no_supported_capture_method;
- }
-
- GST_V4L2_SET_ACTIVE (v4l2src->v4l2object);
-
- return TRUE;
-
- /* ERRORS */
-buffer_pool_new_failed:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, READ,
- (_("Could not map buffers from device '%s'"),
- v4l2src->v4l2object->videodev),
- ("Failed to create buffer pool: %s", g_strerror (errno)));
- return FALSE;
- }
-no_supported_capture_method:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, READ,
- (_("The driver of device '%s' does not support any known capture "
- "method."), v4l2src->v4l2object->videodev), (NULL));
- return FALSE;
- }
-}
-
-
-/******************************************************
- * gst_v4l2src_capture_start():
- * start streaming capture
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-gboolean
-gst_v4l2src_capture_start (GstV4l2Src * v4l2src)
-{
- GST_DEBUG_OBJECT (v4l2src, "starting the capturing");
- //GST_V4L2_CHECK_OPEN (v4l2src->v4l2object);
- GST_V4L2_CHECK_ACTIVE (v4l2src->v4l2object);
-
- v4l2src->quit = FALSE;
-
- if (v4l2src->use_mmap) {
- if (!gst_v4l2src_buffer_pool_activate (v4l2src->pool, v4l2src)) {
- return FALSE;
- }
-
- if (!gst_v4l2_object_start_streaming (v4l2src->v4l2object)) {
- return FALSE;
- }
- }
-
- v4l2src->is_capturing = TRUE;
-
- return TRUE;
-}
-
-/******************************************************
- * gst_v4l2src_capture_stop():
- * stop streaming capture
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-gboolean
-gst_v4l2src_capture_stop (GstV4l2Src * v4l2src)
-{
- GST_DEBUG_OBJECT (v4l2src, "stopping capturing");
-
- if (!GST_V4L2_IS_OPEN (v4l2src->v4l2object)) {
- goto done;
- }
- if (!GST_V4L2_IS_ACTIVE (v4l2src->v4l2object)) {
- goto done;
- }
-
- if (v4l2src->use_mmap) {
- /* we actually need to sync on all queued buffers but not
- * on the non-queued ones */
- if (!gst_v4l2_object_stop_streaming (v4l2src->v4l2object)) {
- return FALSE;
- }
- }
-
-done:
-
- /* make an optional pending wait stop */
- v4l2src->quit = TRUE;
- v4l2src->is_capturing = FALSE;
-
- return TRUE;
-}
-
-/******************************************************
- * gst_v4l2src_capture_deinit():
- * deinitialize the capture system
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-gboolean
-gst_v4l2src_capture_deinit (GstV4l2Src * v4l2src)
-{
- GST_DEBUG_OBJECT (v4l2src, "deinitting capture system");
-
- if (!GST_V4L2_IS_OPEN (v4l2src->v4l2object)) {
- return TRUE;
- }
- if (!GST_V4L2_IS_ACTIVE (v4l2src->v4l2object)) {
- return TRUE;
- }
-
- if (v4l2src->pool) {
- gst_v4l2_buffer_pool_destroy (v4l2src->pool);
- v4l2src->pool = NULL;
- }
-
- GST_V4L2_SET_INACTIVE (v4l2src->v4l2object);
-
- return TRUE;
-}
+++ /dev/null
-/* GStreamer
- *
- * Copyright (C) 2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
- * 2006 Edgard Lima <edgard.lima@indt.org.br>
- *
- * v4l2src.h - system calls
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __V4L2SRC_CALLS_H__
-#define __V4L2SRC_CALLS_H__
-
-#include "gstv4l2src.h"
-#include "v4l2_calls.h"
-
-gboolean gst_v4l2src_get_capture (GstV4l2Src * v4l2src);
-gboolean gst_v4l2src_set_capture (GstV4l2Src * v4l2src,
- guint32 pixelformat,
- guint32 width, guint32 height,
- gboolean interlaced,
- guint32 fps_n, guint32 fps_d);
-
-gboolean gst_v4l2src_capture_init (GstV4l2Src * v4l2src, GstCaps *caps);
-gboolean gst_v4l2src_capture_start (GstV4l2Src * v4l2src);
-
-GstFlowReturn gst_v4l2src_grab_frame (GstV4l2Src * v4l2src, GstBuffer **buf);
-
-gboolean gst_v4l2src_capture_stop (GstV4l2Src * v4l2src);
-gboolean gst_v4l2src_capture_deinit (GstV4l2Src * v4l2src);
-
-
-#endif /* __V4L2SRC_CALLS_H__ */
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_waveform_sink_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-static GstCaps *gst_waveform_sink_getcaps (GstBaseSink * bsink);
+static GstCaps *gst_waveform_sink_getcaps (GstBaseSink * bsink,
+ GstCaps * filter);
/************************************************************************/
/* GstAudioSink functions */
/************************************************************************/
static gboolean gst_waveform_sink_prepare (GstAudioSink * asink,
- GstRingBufferSpec * spec);
+ GstAudioRingBufferSpec * spec);
static gboolean gst_waveform_sink_unprepare (GstAudioSink * asink);
static gboolean gst_waveform_sink_open (GstAudioSink * asink);
static gboolean gst_waveform_sink_close (GstAudioSink * asink);
-static guint gst_waveform_sink_write (GstAudioSink * asink, gpointer data,
+static gint gst_waveform_sink_write (GstAudioSink * asink, gpointer data,
guint length);
static guint gst_waveform_sink_delay (GstAudioSink * asink);
static void gst_waveform_sink_reset (GstAudioSink * asink);
/* Utils */
/************************************************************************/
GstCaps *gst_waveform_sink_create_caps (gint rate, gint channels,
- gint bits_per_sample);
+ const gchar * format);
WAVEHDR *bufferpool_get_buffer (GstWaveFormSink * wfsink, gpointer data,
guint length);
void CALLBACK waveOutProc (HWAVEOUT hwo, UINT uMsg, DWORD_PTR dwInstance,
DWORD_PTR dwParam1, DWORD_PTR dwParam2);
static GstStaticPadTemplate waveformsink_sink_factory =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "signed = (boolean) { TRUE, FALSE }, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]; "
- "audio/x-raw-int, "
- "signed = (boolean) { TRUE, FALSE }, "
- "width = (int) 8, "
- "depth = (int) 8, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { " GST_AUDIO_NE (S16) ", S8 }, "
+ "layout = (string) interleaved, "
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]"));
-GST_BOILERPLATE (GstWaveFormSink, gst_waveform_sink, GstAudioSink,
- GST_TYPE_AUDIO_SINK);
-
-static void
-gst_waveform_sink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "WaveForm Audio Sink",
- "Sink/Audio",
- "Output to a sound card via WaveForm API",
- "Sebastien Moutte <sebastien@moutte.net>");
- gst_element_class_add_static_pad_template (element_class,
- &waveformsink_sink_factory);
-}
+#define gst_waveform_sink_parent_class parent_class
+G_DEFINE_TYPE (GstWaveFormSink, gst_waveform_sink, GST_TYPE_AUDIO_SINK);
static void
gst_waveform_sink_class_init (GstWaveFormSinkClass * klass)
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseSinkClass *gstbasesink_class;
- GstBaseAudioSinkClass *gstbaseaudiosink_class;
GstAudioSinkClass *gstaudiosink_class;
+ GstElementClass *element_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasesink_class = (GstBaseSinkClass *) klass;
- gstbaseaudiosink_class = (GstBaseAudioSinkClass *) klass;
gstaudiosink_class = (GstAudioSinkClass *) klass;
+ element_class = GST_ELEMENT_CLASS (klass);
parent_class = g_type_class_peek_parent (klass);
GST_DEBUG_CATEGORY_INIT (waveformsink_debug, "waveformsink", 0,
"Waveform sink");
+
+ gst_element_class_set_details_simple (element_class, "WaveForm Audio Sink",
+ "Sink/Audio",
+ "Output to a sound card via WaveForm API",
+ "Sebastien Moutte <sebastien@moutte.net>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&waveformsink_sink_factory));
}
static void
}
static void
-gst_waveform_sink_init (GstWaveFormSink * wfsink,
- GstWaveFormSinkClass * g_class)
+gst_waveform_sink_init (GstWaveFormSink * wfsink)
{
/* initialize members */
wfsink->hwaveout = NULL;
}
static GstCaps *
-gst_waveform_sink_getcaps (GstBaseSink * bsink)
+gst_waveform_sink_getcaps (GstBaseSink * bsink, GstCaps * filter)
{
GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (bsink);
MMRESULT mmresult;
/* create a caps for all wave formats supported by the device
starting by the best quality format */
if (wocaps.dwFormats & WAVE_FORMAT_96S16) {
- caps_temp = gst_waveform_sink_create_caps (96000, 2, 16);
+ caps_temp = gst_waveform_sink_create_caps (96000, 2, GST_AUDIO_NE (S16));
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_96S08) {
- caps_temp = gst_waveform_sink_create_caps (96000, 2, 8);
+ caps_temp = gst_waveform_sink_create_caps (96000, 2, "S8");
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_96M16) {
- caps_temp = gst_waveform_sink_create_caps (96000, 1, 16);
+ caps_temp = gst_waveform_sink_create_caps (96000, 1, GST_AUDIO_NE (S16));
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_96M08) {
- caps_temp = gst_waveform_sink_create_caps (96000, 1, 8);
+ caps_temp = gst_waveform_sink_create_caps (96000, 1, "S8");
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_4S16) {
- caps_temp = gst_waveform_sink_create_caps (44100, 2, 16);
+ caps_temp = gst_waveform_sink_create_caps (44100, 2, GST_AUDIO_NE (S16));
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_4S08) {
- caps_temp = gst_waveform_sink_create_caps (44100, 2, 8);
+ caps_temp = gst_waveform_sink_create_caps (44100, 2, "S8");
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_4M16) {
- caps_temp = gst_waveform_sink_create_caps (44100, 1, 16);
+ caps_temp = gst_waveform_sink_create_caps (44100, 1, GST_AUDIO_NE (S16));
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_4M08) {
- caps_temp = gst_waveform_sink_create_caps (44100, 1, 8);
+ caps_temp = gst_waveform_sink_create_caps (44100, 1, "S8");
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_2S16) {
- caps_temp = gst_waveform_sink_create_caps (22050, 2, 16);
+ caps_temp = gst_waveform_sink_create_caps (22050, 2, GST_AUDIO_NE (S16));
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_2S08) {
- caps_temp = gst_waveform_sink_create_caps (22050, 2, 8);
+ caps_temp = gst_waveform_sink_create_caps (22050, 2, "S8");
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_2M16) {
- caps_temp = gst_waveform_sink_create_caps (22050, 1, 16);
+ caps_temp = gst_waveform_sink_create_caps (22050, 1, GST_AUDIO_NE (S16));
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_2M08) {
- caps_temp = gst_waveform_sink_create_caps (22050, 1, 8);
+ caps_temp = gst_waveform_sink_create_caps (22050, 1, "S8");
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_1S16) {
- caps_temp = gst_waveform_sink_create_caps (11025, 2, 16);
+ caps_temp = gst_waveform_sink_create_caps (11025, 2, GST_AUDIO_NE (S16));
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_1S08) {
- caps_temp = gst_waveform_sink_create_caps (11025, 2, 8);
+ caps_temp = gst_waveform_sink_create_caps (11025, 2, "S8");
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_1M16) {
- caps_temp = gst_waveform_sink_create_caps (11025, 1, 16);
+ caps_temp = gst_waveform_sink_create_caps (11025, 1, GST_AUDIO_NE (S16));
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
if (wocaps.dwFormats & WAVE_FORMAT_1M08) {
- caps_temp = gst_waveform_sink_create_caps (11025, 1, 8);
+ caps_temp = gst_waveform_sink_create_caps (11025, 1, "S8");
if (caps_temp) {
gst_caps_append (caps, caps_temp);
}
}
static gboolean
-gst_waveform_sink_prepare (GstAudioSink * asink, GstRingBufferSpec * spec)
+gst_waveform_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
{
GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (asink);
WAVEFORMATEX wfx;
memset (&wfx, 0, sizeof (wfx));
wfx.cbSize = 0;
wfx.wFormatTag = WAVE_FORMAT_PCM;
- wfx.nChannels = spec->channels;
- wfx.nSamplesPerSec = spec->rate;
- wfx.wBitsPerSample = (spec->bytes_per_sample * 8) / wfx.nChannels;
- wfx.nBlockAlign = spec->bytes_per_sample;
+ wfx.nChannels = spec->info.channels;
+ wfx.nSamplesPerSec = spec->info.rate;
+ wfx.wBitsPerSample = (spec->info.bpf * 8) / wfx.nChannels;
+ wfx.nBlockAlign = spec->info.bpf;
wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
/* save bytes per sample to use it in delay */
- wfsink->bytes_per_sample = spec->bytes_per_sample;
+ wfsink->bytes_per_sample = spec->info.bpf;
/* open the default audio device with the given caps */
mmresult = waveOutOpen (&wfsink->hwaveout, WAVE_MAPPER,
return TRUE;
}
-static guint
+static gint
gst_waveform_sink_write (GstAudioSink * asink, gpointer data, guint length)
{
GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (asink);
}
GstCaps *
-gst_waveform_sink_create_caps (gint rate, gint channels, gint bits_per_sample)
+gst_waveform_sink_create_caps (gint rate, gint channels, const gchar * format)
{
GstCaps *caps = NULL;
- caps = gst_caps_new_simple ("audio/x-raw-int",
- "width", G_TYPE_INT, bits_per_sample,
- "depth", G_TYPE_INT, bits_per_sample,
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, TRUE,
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, format,
+ "layout", G_TYPE_STRING, "interleaved",
"channels", G_TYPE_INT, channels, "rate", G_TYPE_INT, rate, NULL);
return caps;
}
PROP_XNAME,
};
-GST_BOILERPLATE (GstXImageSrc, gst_ximage_src, GstPushSrc, GST_TYPE_PUSH_SRC);
+#define gst_ximage_src_parent_class parent_class
+G_DEFINE_TYPE (GstXImageSrc, gst_ximage_src, GST_TYPE_PUSH_SRC);
-static void gst_ximage_src_fixate (GstPad * pad, GstCaps * caps);
+static void gst_ximage_src_fixate (GstBaseSrc * bsrc, GstCaps * caps);
static void gst_ximage_src_clear_bufpool (GstXImageSrc * ximagesrc);
/* Called when a buffer is returned from the pipeline */
static void
-gst_ximage_src_return_buf (GstXImageSrc * ximagesrc,
- GstXImageSrcBuffer * ximage)
+gst_ximage_src_return_buf (GstXImageSrc * ximagesrc, GstBuffer * ximage)
{
+ GstMetaXImage *meta = GST_META_XIMAGE_GET (ximage);
+
/* If our geometry changed we can't reuse that image. */
- if ((ximage->width != ximagesrc->width) ||
- (ximage->height != ximagesrc->height)) {
+ if ((meta->width != ximagesrc->width) || (meta->height != ximagesrc->height)) {
GST_DEBUG_OBJECT (ximagesrc,
"destroy image %p as its size changed %dx%d vs current %dx%d",
- ximage, ximage->width, ximage->height,
- ximagesrc->width, ximagesrc->height);
- g_mutex_lock (ximagesrc->x_lock);
+ ximage, meta->width, meta->height, ximagesrc->width, ximagesrc->height);
+ g_mutex_lock (&ximagesrc->x_lock);
gst_ximageutil_ximage_destroy (ximagesrc->xcontext, ximage);
- g_mutex_unlock (ximagesrc->x_lock);
+ g_mutex_unlock (&ximagesrc->x_lock);
} else {
/* In that case we can reuse the image and add it to our image pool. */
GST_LOG_OBJECT (ximagesrc, "recycling image %p in pool", ximage);
/* need to increment the refcount again to recycle */
- gst_buffer_ref (GST_BUFFER (ximage));
- g_mutex_lock (ximagesrc->pool_lock);
+ gst_buffer_ref (ximage);
+ g_mutex_lock (&ximagesrc->pool_lock);
GST_BUFFER_FLAGS (GST_BUFFER (ximage)) = 0; /* clear out any flags from the previous use */
ximagesrc->buffer_pool = g_slist_prepend (ximagesrc->buffer_pool, ximage);
- g_mutex_unlock (ximagesrc->pool_lock);
+ g_mutex_unlock (&ximagesrc->pool_lock);
}
}
if (s->xcontext != NULL)
return TRUE;
- g_mutex_lock (s->x_lock);
+ g_mutex_lock (&s->x_lock);
s->xcontext = ximageutil_xcontext_get (GST_ELEMENT (s), name);
if (s->xcontext == NULL) {
- g_mutex_unlock (s->x_lock);
+ g_mutex_unlock (&s->x_lock);
GST_ELEMENT_ERROR (s, RESOURCE, OPEN_READ,
("Could not open X display for reading"),
("NULL returned from getting xcontext"));
#endif
#endif
- g_mutex_unlock (s->x_lock);
+ g_mutex_unlock (&s->x_lock);
if (s->xcontext == NULL)
return FALSE;
#endif
if (src->xcontext) {
- g_mutex_lock (src->x_lock);
+ g_mutex_lock (&src->x_lock);
#ifdef HAVE_XDAMAGE
if (src->damage_copy_gc != None) {
ximageutil_xcontext_clear (src->xcontext);
src->xcontext = NULL;
- g_mutex_unlock (src->x_lock);
+ g_mutex_unlock (&src->x_lock);
}
return TRUE;
}
#endif
+#ifdef HAVE_XDAMAGE
+static void
+copy_buffer (GstBuffer * dest, GstBuffer * src)
+{
+ GstMapInfo map;
+
+ gst_buffer_map (src, &map, GST_MAP_READ);
+ gst_buffer_fill (dest, 0, map.data, map.size);
+ gst_buffer_unmap (src, &map);
+}
+#endif
+
/* Retrieve an XImageSrcBuffer, preferably from our
* pool of existing images and populate it from the window */
-static GstXImageSrcBuffer *
+static GstBuffer *
gst_ximage_src_ximage_get (GstXImageSrc * ximagesrc)
{
- GstXImageSrcBuffer *ximage = NULL;
+ GstBuffer *ximage = NULL;
+ GstMetaXImage *meta;
- g_mutex_lock (ximagesrc->pool_lock);
+ g_mutex_lock (&ximagesrc->pool_lock);
while (ximagesrc->buffer_pool != NULL) {
ximage = ximagesrc->buffer_pool->data;
- if ((ximage->width != ximagesrc->width) ||
- (ximage->height != ximagesrc->height)) {
+ meta = GST_META_XIMAGE_GET (ximage);
+
+ if ((meta->width != ximagesrc->width) ||
+ (meta->height != ximagesrc->height)) {
gst_ximage_buffer_free (ximage);
}
ximagesrc->buffer_pool = g_slist_delete_link (ximagesrc->buffer_pool,
ximagesrc->buffer_pool);
}
- g_mutex_unlock (ximagesrc->pool_lock);
+ g_mutex_unlock (&ximagesrc->pool_lock);
if (ximage == NULL) {
- GstXContext *xcontext;
- GstCaps *caps = NULL;
-
GST_DEBUG_OBJECT (ximagesrc, "creating image (%dx%d)",
ximagesrc->width, ximagesrc->height);
- g_mutex_lock (ximagesrc->x_lock);
+ g_mutex_lock (&ximagesrc->x_lock);
ximage = gst_ximageutil_ximage_new (ximagesrc->xcontext,
GST_ELEMENT (ximagesrc), ximagesrc->width, ximagesrc->height,
(BufferReturnFunc) (gst_ximage_src_return_buf));
GST_ELEMENT_ERROR (ximagesrc, RESOURCE, WRITE, (NULL),
("could not create a %dx%d ximage", ximagesrc->width,
ximagesrc->height));
- g_mutex_unlock (ximagesrc->x_lock);
+ g_mutex_unlock (&ximagesrc->x_lock);
return NULL;
}
- xcontext = ximagesrc->xcontext;
-
-
- caps = gst_caps_new_simple ("video/x-raw-rgb",
- "bpp", G_TYPE_INT, xcontext->bpp,
- "depth", G_TYPE_INT, xcontext->depth,
- "endianness", G_TYPE_INT, xcontext->endianness,
- "red_mask", G_TYPE_INT, xcontext->r_mask_output,
- "green_mask", G_TYPE_INT, xcontext->g_mask_output,
- "blue_mask", G_TYPE_INT, xcontext->b_mask_output,
- "width", G_TYPE_INT, ximagesrc->width,
- "height", G_TYPE_INT, ximagesrc->height,
- "framerate", GST_TYPE_FRACTION, ximagesrc->fps_n, ximagesrc->fps_d,
- "pixel-aspect-ratio", GST_TYPE_FRACTION,
- gst_value_get_fraction_numerator (xcontext->par),
- gst_value_get_fraction_denominator (xcontext->par), NULL);
-
- gst_buffer_set_caps (GST_BUFFER (ximage), caps);
- g_mutex_unlock (ximagesrc->x_lock);
-
- gst_caps_unref (caps);
+ g_mutex_unlock (&ximagesrc->x_lock);
}
g_return_val_if_fail (GST_IS_XIMAGE_SRC (ximagesrc), NULL);
+
+ meta = GST_META_XIMAGE_GET (ximage);
+
#ifdef HAVE_XDAMAGE
if (ximagesrc->have_xdamage && ximagesrc->use_damage &&
ximagesrc->last_ximage != NULL) {
if (!have_frame) {
GST_LOG_OBJECT (ximagesrc,
- "Copying from last frame ximage->size: %d",
- GST_BUFFER_SIZE (GST_BUFFER (ximage)));
- memcpy (GST_BUFFER_DATA (GST_BUFFER (ximage)),
- GST_BUFFER_DATA (GST_BUFFER (ximagesrc->last_ximage)),
- GST_BUFFER_SIZE (GST_BUFFER (ximage)));
+ "Copying from last frame ximage->size: %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (ximage));
+ copy_buffer (ximage, ximagesrc->last_ximage);
have_frame = TRUE;
}
for (i = 0; i < nrects; i++) {
startx, starty, width, height);
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
startx, starty, width, height, AllPlanes, ZPixmap,
- ximage->ximage, startx - ximagesrc->startx,
+ meta->ximage, startx - ximagesrc->startx,
starty - ximagesrc->starty);
}
} else {
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
rects[i].x, rects[i].y,
rects[i].width, rects[i].height,
- AllPlanes, ZPixmap, ximage->ximage, rects[i].x, rects[i].y);
+ AllPlanes, ZPixmap, meta->ximage, rects[i].x, rects[i].y);
}
}
free (rects);
} while (XPending (ximagesrc->xcontext->disp));
if (!have_frame) {
GST_LOG_OBJECT (ximagesrc,
- "Copying from last frame ximage->size: %d",
- GST_BUFFER_SIZE (GST_BUFFER (ximage)));
- memcpy (GST_BUFFER_DATA (GST_BUFFER (ximage)),
- GST_BUFFER_DATA (GST_BUFFER (ximagesrc->last_ximage)),
- GST_BUFFER_SIZE (GST_BUFFER (ximage)));
+ "Copying from last frame ximage->size: %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (ximage));
+ copy_buffer (ximage, ximagesrc->last_ximage);
}
#ifdef HAVE_XFIXES
/* re-get area where last mouse pointer was but only if in our clipping
GST_DEBUG_OBJECT (ximagesrc, "Removing cursor from %d,%d", x, y);
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
startx, starty, iwidth, iheight, AllPlanes, ZPixmap,
- ximage->ximage, startx - ximagesrc->startx,
+ meta->ximage, startx - ximagesrc->startx,
starty - ximagesrc->starty);
}
} else {
GST_DEBUG_OBJECT (ximagesrc, "Removing cursor from %d,%d", x, y);
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
- x, y, width, height, AllPlanes, ZPixmap, ximage->ximage, x, y);
+ x, y, width, height, AllPlanes, ZPixmap, meta->ximage, x, y);
}
}
#endif
if (ximagesrc->xcontext->use_xshm) {
GST_DEBUG_OBJECT (ximagesrc, "Retrieving screen using XShm");
XShmGetImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
- ximage->ximage, ximagesrc->startx, ximagesrc->starty, AllPlanes);
+ meta->ximage, ximagesrc->startx, ximagesrc->starty, AllPlanes);
} else
#endif /* HAVE_XSHM */
if (ximagesrc->remote) {
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
ximagesrc->startx, ximagesrc->starty, ximagesrc->width,
- ximagesrc->height, AllPlanes, ZPixmap, ximage->ximage, 0, 0);
+ ximagesrc->height, AllPlanes, ZPixmap, meta->ximage, 0, 0);
} else {
- ximage->ximage =
+ meta->ximage =
XGetImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
ximagesrc->startx, ximagesrc->starty, ximagesrc->width,
ximagesrc->height, AllPlanes, ZPixmap);
(guint8 *) & (ximagesrc->cursor_image->pixels[((j -
cy) * ximagesrc->cursor_image->width + (i - cx))]);
dest =
- (guint8 *) & (ximage->ximage->data[((j -
+ (guint8 *) & (meta->ximage->data[((j -
ximagesrc->starty) * ximagesrc->width + (i -
ximagesrc->startx)) * (ximagesrc->xcontext->bpp /
8)]);
#ifdef HAVE_XDAMAGE
if (ximagesrc->have_xdamage && ximagesrc->use_damage) {
/* need to ref ximage to put in last_ximage */
- gst_buffer_ref (GST_BUFFER (ximage));
+ gst_buffer_ref (ximage);
if (ximagesrc->last_ximage) {
- gst_buffer_unref (GST_BUFFER (ximagesrc->last_ximage));
+ gst_buffer_unref (ximagesrc->last_ximage);
}
ximagesrc->last_ximage = ximage;
GST_LOG_OBJECT (ximagesrc, "reffing current buffer for last_ximage");
gst_ximage_src_create (GstPushSrc * bs, GstBuffer ** buf)
{
GstXImageSrc *s = GST_XIMAGE_SRC (bs);
- GstXImageSrcBuffer *image;
+ GstBuffer *image;
GstClockTime base_time;
GstClockTime next_capture_ts;
GstClockTime dur;
if (ret == GST_CLOCK_UNSCHEDULED) {
/* Got woken up by the unlock function */
GST_OBJECT_UNLOCK (s);
- return GST_FLOW_WRONG_STATE;
+ return GST_FLOW_FLUSHING;
}
/* Duration is a complete 1/fps frame duration */
dur = gst_util_uint64_scale_int (GST_SECOND, s->fps_d, s->fps_n);
if (!image)
return GST_FLOW_ERROR;
- *buf = GST_BUFFER (image);
+ *buf = image;
GST_BUFFER_TIMESTAMP (*buf) = next_capture_ts;
GST_BUFFER_DURATION (*buf) = dur;
static void
gst_ximage_src_clear_bufpool (GstXImageSrc * ximagesrc)
{
- g_mutex_lock (ximagesrc->pool_lock);
+ g_mutex_lock (&ximagesrc->pool_lock);
while (ximagesrc->buffer_pool != NULL) {
- GstXImageSrcBuffer *ximage = ximagesrc->buffer_pool->data;
+ GstBuffer *ximage = ximagesrc->buffer_pool->data;
gst_ximage_buffer_free (ximage);
ximagesrc->buffer_pool = g_slist_delete_link (ximagesrc->buffer_pool,
ximagesrc->buffer_pool);
}
- g_mutex_unlock (ximagesrc->pool_lock);
-}
-
-static void
-gst_ximage_src_base_init (gpointer g_class)
-{
- GstElementClass *ec = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (ec, "Ximage video source",
- "Source/Video",
- "Creates a screenshot video stream",
- "Lutz Mueller <lutz@users.sourceforge.net>, "
- "Jan Schmidt <thaytan@mad.scientist.com>, "
- "Zaheer Merali <zaheerabbas at merali dot org>");
- gst_element_class_add_static_pad_template (ec, &t);
+ g_mutex_unlock (&ximagesrc->pool_lock);
}
static void
ximageutil_xcontext_clear (src->xcontext);
g_free (src->xname);
- g_mutex_free (src->pool_lock);
- g_mutex_free (src->x_lock);
+ g_mutex_clear (&src->pool_lock);
+ g_mutex_clear (&src->x_lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static GstCaps *
-gst_ximage_src_get_caps (GstBaseSrc * bs)
+gst_ximage_src_get_caps (GstBaseSrc * bs, GstCaps * filter)
{
GstXImageSrc *s = GST_XIMAGE_SRC (bs);
GstXContext *xcontext;
}
static void
-gst_ximage_src_fixate (GstPad * pad, GstCaps * caps)
+gst_ximage_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
gint i;
GstStructure *structure;
gst_structure_fixate_field_nearest_fraction (structure, "framerate", 25, 1);
}
+ GST_BASE_SRC_CLASS (parent_class)->fixate (bsrc, caps);
}
static void
gst_ximage_src_class_init (GstXImageSrcClass * klass)
{
GObjectClass *gc = G_OBJECT_CLASS (klass);
+ GstElementClass *ec = GST_ELEMENT_CLASS (klass);
GstBaseSrcClass *bc = GST_BASE_SRC_CLASS (klass);
GstPushSrcClass *push_class = GST_PUSH_SRC_CLASS (klass);
g_param_spec_boolean ("remote", "Remote dispay",
"Whether the display is remote", FALSE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
/**
* GstXImageSrc:xid
*
g_param_spec_uint64 ("xid", "Window XID",
"Window XID to capture from", 0, G_MAXUINT64, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
/**
* GstXImageSrc:xname
*
"Window name to capture from", NULL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- parent_class = g_type_class_peek_parent (klass);
+ gst_element_class_set_details_simple (ec, "Ximage video source",
+ "Source/Video",
+ "Creates a screenshot video stream",
+ "Lutz Mueller <lutz@users.sourceforge.net>, "
+ "Jan Schmidt <thaytan@mad.scientist.com>, "
+ "Zaheer Merali <zaheerabbas at merali dot org>");
+ gst_element_class_add_pad_template (ec, gst_static_pad_template_get (&t));
- push_class->create = gst_ximage_src_create;
+ bc->fixate = gst_ximage_src_fixate;
bc->get_caps = gst_ximage_src_get_caps;
bc->set_caps = gst_ximage_src_set_caps;
bc->start = gst_ximage_src_start;
bc->stop = gst_ximage_src_stop;
bc->unlock = gst_ximage_src_unlock;
+ push_class->create = gst_ximage_src_create;
}
static void
-gst_ximage_src_init (GstXImageSrc * ximagesrc, GstXImageSrcClass * klass)
+gst_ximage_src_init (GstXImageSrc * ximagesrc)
{
gst_base_src_set_format (GST_BASE_SRC (ximagesrc), GST_FORMAT_TIME);
gst_base_src_set_live (GST_BASE_SRC (ximagesrc), TRUE);
- gst_pad_set_fixatecaps_function (GST_BASE_SRC_PAD (ximagesrc),
- gst_ximage_src_fixate);
- ximagesrc->pool_lock = g_mutex_new ();
- ximagesrc->x_lock = g_mutex_new ();
+ g_mutex_init (&ximagesrc->pool_lock);
+ g_mutex_init (&ximagesrc->x_lock);
ximagesrc->show_pointer = TRUE;
ximagesrc->use_damage = TRUE;
ximagesrc->startx = 0;
gint64 last_frame_no;
/* Protect X Windows calls */
- GMutex *x_lock;
+ GMutex x_lock;
/* Gathered pool of emitted buffers */
- GMutex *pool_lock;
+ GMutex pool_lock;
GSList *buffer_pool;
/* XFixes and XDamage support */
int damage_event_base;
XserverRegion damage_region;
GC damage_copy_gc;
- GstXImageSrcBuffer *last_ximage;
+ GstBuffer *last_ximage;
#endif
};
#include "ximageutil.h"
+const GstMetaInfo *
+gst_meta_ximage_get_info (void)
+{
+ static const GstMetaInfo *meta_ximage_info = NULL;
+
+ if (meta_ximage_info == NULL) {
+ meta_ximage_info =
+ gst_meta_register ("GstMetaXImageSrc", "GstMetaXImageSrc",
+ sizeof (GstMetaXImage), (GstMetaInitFunction) NULL,
+ (GstMetaFreeFunction) NULL, (GstMetaCopyFunction) NULL,
+ (GstMetaTransformFunction) NULL);
+ }
+ return meta_ximage_info;
+}
+
#ifdef HAVE_XSHM
static gboolean error_caught = FALSE;
gst_value_get_fraction_denominator (xcontext->par));
}
-static GstBufferClass *ximagesrc_buffer_parent_class = NULL;
-
static void
-gst_ximagesrc_buffer_finalize (GstXImageSrcBuffer * ximage)
+gst_ximagesrc_buffer_dispose (GstBuffer * ximage)
{
GstElement *parent;
+ GstMetaXImage *meta;
g_return_if_fail (ximage != NULL);
- parent = ximage->parent;
+ meta = GST_META_XIMAGE_GET (ximage);
+
+ parent = meta->parent;
if (parent == NULL) {
g_warning ("XImageSrcBuffer->ximagesrc == NULL");
goto beach;
}
- if (ximage->return_func)
- ximage->return_func (parent, ximage);
+ if (meta->return_func)
+ meta->return_func (parent, ximage);
beach:
-
- GST_MINI_OBJECT_CLASS (ximagesrc_buffer_parent_class)->finalize
- (GST_MINI_OBJECT (ximage));
-
return;
}
void
-gst_ximage_buffer_free (GstXImageSrcBuffer * ximage)
+gst_ximage_buffer_free (GstBuffer * ximage)
{
- /* make sure it is not recycled */
- ximage->width = -1;
- ximage->height = -1;
- gst_buffer_unref (GST_BUFFER (ximage));
-}
+ GstMetaXImage *meta;
-static void
-gst_ximagesrc_buffer_init (GstXImageSrcBuffer * ximage_buffer, gpointer g_class)
-{
-#ifdef HAVE_XSHM
- ximage_buffer->SHMInfo.shmaddr = ((void *) -1);
- ximage_buffer->SHMInfo.shmid = -1;
-#endif
-}
-
-static void
-gst_ximagesrc_buffer_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
- ximagesrc_buffer_parent_class = g_type_class_peek_parent (g_class);
-
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_ximagesrc_buffer_finalize;
-}
+ meta = GST_META_XIMAGE_GET (ximage);
-static GType
-gst_ximagesrc_buffer_get_type (void)
-{
- static GType _gst_ximagesrc_buffer_type;
-
- if (G_UNLIKELY (_gst_ximagesrc_buffer_type == 0)) {
- static const GTypeInfo ximagesrc_buffer_info = {
- sizeof (GstBufferClass),
- NULL,
- NULL,
- gst_ximagesrc_buffer_class_init,
- NULL,
- NULL,
- sizeof (GstXImageSrcBuffer),
- 0,
- (GInstanceInitFunc) gst_ximagesrc_buffer_init,
- NULL
- };
- _gst_ximagesrc_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
- "GstXImageSrcBuffer", &ximagesrc_buffer_info, 0);
- }
- return _gst_ximagesrc_buffer_type;
+ /* make sure it is not recycled */
+ meta->width = -1;
+ meta->height = -1;
+ gst_buffer_unref (ximage);
}
/* This function handles GstXImageSrcBuffer creation depending on XShm availability */
-GstXImageSrcBuffer *
+GstBuffer *
gst_ximageutil_ximage_new (GstXContext * xcontext,
GstElement * parent, int width, int height, BufferReturnFunc return_func)
{
- GstXImageSrcBuffer *ximage = NULL;
+ GstBuffer *ximage = NULL;
+ GstMetaXImage *meta;
gboolean succeeded = FALSE;
- ximage =
- (GstXImageSrcBuffer *) gst_mini_object_new (GST_TYPE_XIMAGESRC_BUFFER);
+ ximage = gst_buffer_new ();
+ GST_MINI_OBJECT_CAST (ximage)->dispose =
+ (GstMiniObjectDisposeFunction) gst_ximagesrc_buffer_dispose;
- ximage->width = width;
- ximage->height = height;
+ meta = GST_META_XIMAGE_ADD (ximage);
+ meta->width = width;
+ meta->height = height;
#ifdef HAVE_XSHM
+ meta->SHMInfo.shmaddr = ((void *) -1);
+ meta->SHMInfo.shmid = -1;
+
if (xcontext->use_xshm) {
- ximage->ximage = XShmCreateImage (xcontext->disp,
+ meta->ximage = XShmCreateImage (xcontext->disp,
xcontext->visual, xcontext->depth,
- ZPixmap, NULL, &ximage->SHMInfo, ximage->width, ximage->height);
- if (!ximage->ximage) {
+ ZPixmap, NULL, &meta->SHMInfo, meta->width, meta->height);
+ if (!meta->ximage) {
GST_WARNING_OBJECT (parent,
- "could not XShmCreateImage a %dx%d image",
- ximage->width, ximage->height);
+ "could not XShmCreateImage a %dx%d image", meta->width, meta->height);
/* Retry without XShm */
xcontext->use_xshm = FALSE;
}
/* we have to use the returned bytes_per_line for our shm size */
- ximage->size = ximage->ximage->bytes_per_line * ximage->ximage->height;
- ximage->SHMInfo.shmid = shmget (IPC_PRIVATE, ximage->size,
- IPC_CREAT | 0777);
- if (ximage->SHMInfo.shmid == -1)
+ meta->size = meta->ximage->bytes_per_line * meta->ximage->height;
+ meta->SHMInfo.shmid = shmget (IPC_PRIVATE, meta->size, IPC_CREAT | 0777);
+ if (meta->SHMInfo.shmid == -1)
goto beach;
- ximage->SHMInfo.shmaddr = shmat (ximage->SHMInfo.shmid, 0, 0);
- if (ximage->SHMInfo.shmaddr == ((void *) -1))
+ meta->SHMInfo.shmaddr = shmat (meta->SHMInfo.shmid, 0, 0);
+ if (meta->SHMInfo.shmaddr == ((void *) -1))
goto beach;
/* Delete the SHM segment. It will actually go away automatically
* when we detach now */
- shmctl (ximage->SHMInfo.shmid, IPC_RMID, 0);
+ shmctl (meta->SHMInfo.shmid, IPC_RMID, 0);
- ximage->ximage->data = ximage->SHMInfo.shmaddr;
- ximage->SHMInfo.readOnly = FALSE;
+ meta->ximage->data = meta->SHMInfo.shmaddr;
+ meta->SHMInfo.readOnly = FALSE;
- if (XShmAttach (xcontext->disp, &ximage->SHMInfo) == 0)
+ if (XShmAttach (xcontext->disp, &meta->SHMInfo) == 0)
goto beach;
XSync (xcontext->disp, FALSE);
no_xshm:
#endif /* HAVE_XSHM */
{
- ximage->ximage = XCreateImage (xcontext->disp,
+ meta->ximage = XCreateImage (xcontext->disp,
xcontext->visual,
xcontext->depth,
- ZPixmap, 0, NULL, ximage->width, ximage->height, xcontext->bpp, 0);
- if (!ximage->ximage)
+ ZPixmap, 0, NULL, meta->width, meta->height, xcontext->bpp, 0);
+ if (!meta->ximage)
goto beach;
/* we have to use the returned bytes_per_line for our image size */
- ximage->size = ximage->ximage->bytes_per_line * ximage->ximage->height;
- ximage->ximage->data = g_malloc (ximage->size);
+ meta->size = meta->ximage->bytes_per_line * meta->ximage->height;
+ meta->ximage->data = g_malloc (meta->size);
XSync (xcontext->disp, FALSE);
}
succeeded = TRUE;
- GST_BUFFER_DATA (ximage) = (guchar *) ximage->ximage->data;
- GST_BUFFER_SIZE (ximage) = ximage->size;
+ gst_buffer_take_memory (ximage, -1,
+ gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, meta->ximage->data,
+ NULL, meta->size, 0, meta->size));
/* Keep a ref to our src */
- ximage->parent = gst_object_ref (parent);
- ximage->return_func = return_func;
+ meta->parent = gst_object_ref (parent);
+ meta->return_func = return_func;
beach:
if (!succeeded) {
gst_ximage_buffer_free (ximage);
/* This function destroys a GstXImageBuffer handling XShm availability */
void
-gst_ximageutil_ximage_destroy (GstXContext * xcontext,
- GstXImageSrcBuffer * ximage)
+gst_ximageutil_ximage_destroy (GstXContext * xcontext, GstBuffer * ximage)
{
+ GstMetaXImage *meta;
+
+ meta = GST_META_XIMAGE_GET (ximage);
+
/* We might have some buffers destroyed after changing state to NULL */
if (!xcontext)
goto beach;
#ifdef HAVE_XSHM
if (xcontext->use_xshm) {
- if (ximage->SHMInfo.shmaddr != ((void *) -1)) {
- XShmDetach (xcontext->disp, &ximage->SHMInfo);
+ if (meta->SHMInfo.shmaddr != ((void *) -1)) {
+ XShmDetach (xcontext->disp, &meta->SHMInfo);
XSync (xcontext->disp, 0);
- shmdt (ximage->SHMInfo.shmaddr);
+ shmdt (meta->SHMInfo.shmaddr);
}
- if (ximage->ximage)
- XDestroyImage (ximage->ximage);
+ if (meta->ximage)
+ XDestroyImage (meta->ximage);
} else
#endif /* HAVE_XSHM */
{
- if (ximage->ximage) {
- XDestroyImage (ximage->ximage);
+ if (meta->ximage) {
+ XDestroyImage (meta->ximage);
}
}
XSync (xcontext->disp, FALSE);
beach:
- if (ximage->parent) {
+ if (meta->parent) {
/* Release the ref to our parent */
- gst_object_unref (ximage->parent);
- ximage->parent = NULL;
+ gst_object_unref (meta->parent);
+ meta->parent = NULL;
}
return;
typedef struct _GstXContext GstXContext;
typedef struct _GstXWindow GstXWindow;
typedef struct _GstXImage GstXImage;
-typedef struct _GstXImageSrcBuffer GstXImageSrcBuffer;
+typedef struct _GstMetaXImage GstMetaXImage;
/* Global X Context stuff */
/**
/* custom ximagesrc buffer, copied from ximagesink */
/* BufferReturnFunc is called when a buffer is finalised */
-typedef void (*BufferReturnFunc) (GstElement *parent, GstXImageSrcBuffer *buf);
+typedef void (*BufferReturnFunc) (GstElement *parent, GstBuffer *buf);
/**
- * GstXImageSrcBuffer:
+ * GstMetaXImage:
* @parent: a reference to the element we belong to
* @ximage: the XImage of this buffer
* @width: the width in pixels of XImage @ximage
* @height: the height in pixels of XImage @ximage
* @size: the size in bytes of XImage @ximage
*
- * Subclass of #GstBuffer containing additional information about an XImage.
+ * Extra data attached to buffers containing additional information about an XImage.
*/
-struct _GstXImageSrcBuffer {
- GstBuffer buffer;
+struct _GstMetaXImage {
+ GstMeta meta;
/* Reference to the ximagesrc we belong to */
GstElement *parent;
gint width, height;
size_t size;
-
+
BufferReturnFunc return_func;
};
+const GstMetaInfo * gst_meta_ximage_get_info (void);
+#define GST_META_XIMAGE_GET(buf) ((GstMetaXImage *)gst_buffer_get_meta(buf,gst_meta_ximage_get_info()))
+#define GST_META_XIMAGE_ADD(buf) ((GstMetaXImage *)gst_buffer_add_meta(buf,gst_meta_ximage_get_info(),NULL))
-GstXImageSrcBuffer *gst_ximageutil_ximage_new (GstXContext *xcontext,
+GstBuffer *gst_ximageutil_ximage_new (GstXContext *xcontext,
GstElement *parent, int width, int height, BufferReturnFunc return_func);
void gst_ximageutil_ximage_destroy (GstXContext *xcontext,
- GstXImageSrcBuffer * ximage);
+ GstBuffer * ximage);
/* Call to manually release a buffer */
-void gst_ximage_buffer_free (GstXImageSrcBuffer *ximage);
-
-#define GST_TYPE_XIMAGESRC_BUFFER (gst_ximagesrc_buffer_get_type())
-#define GST_IS_XIMAGESRC_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_XIMAGESRC_BUFFER))
-#define GST_IS_XIMAGESRC_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_XIMAGESRC_BUFFER))
-#define GST_XIMAGESRC_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_XIMAGESRC_BUFFER, GstXImageSrcBuffer))
-#define GST_XIMAGESRC_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_XIMAGESRC_BUFFER, GstXImageSrcBufferClass))
-#define GST_XIMAGESRC_BUFFER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_XIMAGESRC_BUFFER, GstXImageSrcBufferClass))
+void gst_ximage_buffer_free (GstBuffer *ximage);
G_END_DECLS
GST_PLUGIN_PATH=$(top_builddir)/gst:$(top_builddir)/ext:$(top_builddir)/sys:$(GSTPB_PLUGINS_DIR):$(GST_PLUGINS_DIR) \
GST_PLUGIN_LOADING_WHITELIST="gstreamer@$(GST_PLUGINS_DIR):gst-plugins-base@$(GSTPB_PLUGINS_DIR):gst-plugins-good@$(top_builddir)" \
GST_STATE_IGNORE_ELEMENTS="aasink autoaudiosrc autoaudiosink autovideosrc autovideosink \
- cacasink cairotextoverlay gconfaudiosrc gconfvideosrc gconfaudiosink gconfvideosink \
+ cacasink cairotextoverlay \
halaudiosrc halaudiosink jackaudiosrc jackaudiosink \
osssrc osssink osxaudiosink osxaudiosrc osxvideosrc osxvideosink \
pulsesink pulsesrc pulsemixer v4l2src"
check_PROGRAMS = \
- generic/index \
generic/states \
elements/aacparse \
elements/ac3parse \
elements/audiopanorama \
elements/audiowsincband \
elements/audiowsinclimit \
+ elements/autodetect \
elements/avimux \
elements/avisubtitle \
elements/capssetter \
TESTS = $(check_PROGRAMS)
-# these tests don't even pass
-# autodetect: temporarily disabled because of broken videosinks in -bad
-noinst_PROGRAMS = \
- elements/autodetect
-
AM_CFLAGS = $(GST_OBJ_CFLAGS) $(GST_CHECK_CFLAGS) $(CHECK_CFLAGS) \
- $(GST_OPTION_CFLAGS) -DGST_TEST_FILES_PATH="\"$(TEST_FILES_DIRECTORY)\"" \
+ $(GST_OPTION_CFLAGS) $(GST_CFLAGS) -DGST_TEST_FILES_PATH="\"$(TEST_FILES_DIRECTORY)\"" \
-UG_DISABLE_ASSERT -UG_DISABLE_CAST_CHECKS
LDADD = $(GST_OBJ_LIBS) $(GST_CHECK_LIBS) $(CHECK_LIBS)
libparser_la_SOURCES = elements/parser.c elements/parser.h
libparser_la_CFLAGS = \
-I$(top_srcdir)/tests/check \
- $(GST_CHECK_CFLAGS) $(GST_OPTION_CFLAGS)
+ $(GST_CHECK_CFLAGS) $(GST_OPTION_CFLAGS) -DGST_USE_UNSTABLE_API
elements_aacparse_LDADD = libparser.la $(LDADD)
elements_aspectratiocrop_LDADD = $(LDADD)
elements_aspectratiocrop_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_audioamplify_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_audioamplify_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
+elements_audiochebband_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_audiochebband_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
+elements_audiocheblimit_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_audiocheblimit_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
+elements_audiodynamic_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_audiodynamic_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
+elements_audioecho_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_audioecho_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
+elements_audioinvert_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_audioinvert_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
elements_audiopanorama_LDADD = \
- $(GST_BASE_LIBS) $(GST_CONTROLLER_LIBS) \
- $(LDADD)
+ $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(GST_BASE_LIBS) \
+ $(GST_CONTROLLER_LIBS) $(LDADD)
elements_audiopanorama_CFLAGS = \
- $(GST_BASE_CFLAGS) $(GST_CONTROLLER_CFLAGS) \
+ $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CONTROLLER_CFLAGS) \
$(CFLAGS) $(AM_CFLAGS)
+elements_audiowsincband_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_audiowsincband_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
+elements_audiowsinclimit_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_audiowsinclimit_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
+elements_equalizer_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_equalizer_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
+elements_rganalysis_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_rganalysis_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+elements_rglimiter_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_rglimiter_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+elements_rgvolume_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_rgvolume_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
+elements_spectrum_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_spectrum_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
elements_cmmldec_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
elements_cmmlenc_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
elements_interleave_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
elements_interleave_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+elements_level_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_level_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
elements_imagefreeze_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(AM_CFLAGS)
elements_imagefreeze_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(LDADD)
elements_jpegenc_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(AM_CFLAGS)
-elements_jpegenc_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstapp-0.10 $(GST_BASE_LIBS) $(LDADD)
+elements_jpegenc_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstapp-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(LDADD)
elements_level_LDADD = $(LDADD) $(LIBM)
elements_rtpbin_buffer_list_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) \
$(WARNING_CFLAGS) $(ERROR_CFLAGS) $(GST_CHECK_CFLAGS) $(AM_CFLAGS)
elements_rtpbin_buffer_list_LDADD = $(GST_PLUGINS_BASE_LIBS) \
- -lgstnetbuffer-@GST_MAJORMINOR@ -lgstrtp-@GST_MAJORMINOR@ \
+ -lgstrtp-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) $(GST_LIBS) $(GST_CHECK_LIBS)
elements_rtpbin_buffer_list_SOURCES = elements/rtpbin_buffer_list.c
elements_udpsrc_CFLAGS = $(AM_CFLAGS) $(GIO_CFLAGS)
elements_udpsrc_LDADD = $(LDADD) $(GIO_LIBS)
-elements_videocrop_LDADD = $(GST_BASE_LIBS) $(LDADD)
-elements_videocrop_CFLAGS = $(GST_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+elements_videocrop_LDADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR) $(LDADD)
+elements_videocrop_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
elements_videofilter_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
elements_videofilter_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR) $(LDADD)
$(LDADD) $(GDK_PIXBUF_LIBS)
+pipelines_flacdec_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
+pipelines_flacdec_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
+
pipelines_wavenc_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(CFLAGS) $(AM_CFLAGS)
pipelines_wavenc_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LDADD)
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("AYUV"))
);
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_RGB)
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBA, RGB }"))
);
static GstElement *
GstElement *alphacolor;
alphacolor = gst_check_setup_element ("alphacolor");
- mysrcpad = gst_check_setup_src_pad (alphacolor, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (alphacolor, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (alphacolor, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (alphacolor, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
{
GstCaps *caps;
- caps = gst_caps_new_simple ("video/x-raw-rgb",
+ caps = gst_caps_new_simple ("video/x-raw",
"width", G_TYPE_INT, 3,
"height", G_TYPE_INT, 4,
- "bpp", G_TYPE_INT, 24,
- "depth", G_TYPE_INT, 24,
"framerate", GST_TYPE_FRACTION, 0, 1,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "red_mask", G_TYPE_INT, 0x00ff0000,
- "green_mask", G_TYPE_INT, 0x0000ff00,
- "blue_mask", G_TYPE_INT, 0x000000ff, NULL);
+ "format", G_TYPE_STRING, "RGB", NULL);
return caps;
}
{
GstCaps *caps;
- caps = gst_caps_new_simple ("video/x-raw-rgb",
+ caps = gst_caps_new_simple ("video/x-raw",
"width", G_TYPE_INT, 3,
"height", G_TYPE_INT, 4,
- "bpp", G_TYPE_INT, 32,
- "depth", G_TYPE_INT, 32,
"framerate", GST_TYPE_FRACTION, 0, 1,
- "endianness", G_TYPE_INT, G_BIG_ENDIAN,
- "red_mask", G_TYPE_INT, 0xff000000,
- "green_mask", G_TYPE_INT, 0x00ff0000,
- "blue_mask", G_TYPE_INT, 0x0000ff00,
- "alpha_mask", G_TYPE_INT, 0x000000ff, NULL);
+ "format", G_TYPE_STRING, "RGBA", NULL);
return caps;
}
+static void
+push_caps (GstCaps * caps)
+{
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
+ gst_caps_unref (caps);
+}
+
static GstBuffer *
create_buffer_rgb24_3x4 (void)
{
};
guint rowstride = GST_ROUND_UP_4 (WIDTH * 3);
GstBuffer *buf;
- GstCaps *caps;
+ GstMapInfo info;
buf = gst_buffer_new_and_alloc (HEIGHT * rowstride);
- fail_unless_equals_int (GST_BUFFER_SIZE (buf), sizeof (rgb24_3x4_img));
- memcpy (GST_BUFFER_DATA (buf), rgb24_3x4_img, sizeof (rgb24_3x4_img));
+ gst_buffer_map (buf, &info, GST_MAP_READWRITE);
+ fail_unless_equals_int (info.size, sizeof (rgb24_3x4_img));
+ memcpy (info.data, rgb24_3x4_img, sizeof (rgb24_3x4_img));
- caps = create_caps_rgb24 ();
- gst_buffer_set_caps (buf, caps);
- gst_caps_unref (caps);
+ gst_buffer_unmap (buf, &info);
return buf;
}
};
guint rowstride = WIDTH * 4;
GstBuffer *buf;
- GstCaps *caps;
+ GstMapInfo map;
buf = gst_buffer_new_and_alloc (HEIGHT * rowstride);
- fail_unless_equals_int (GST_BUFFER_SIZE (buf), sizeof (rgba32_3x4_img));
- memcpy (GST_BUFFER_DATA (buf), rgba32_3x4_img, sizeof (rgba32_3x4_img));
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ fail_unless_equals_int (map.size, sizeof (rgba32_3x4_img));
+ memcpy (map.data, rgba32_3x4_img, sizeof (rgba32_3x4_img));
- caps = create_caps_rgba32 ();
- gst_buffer_set_caps (buf, caps);
- gst_caps_unref (caps);
+ gst_buffer_unmap (buf, &map);
return buf;
}
fail_unless_equals_int (gst_element_set_state (alphacolor, GST_STATE_PLAYING),
GST_STATE_CHANGE_SUCCESS);
+ push_caps (create_caps_rgb24 ());
+
inbuffer = create_buffer_rgb24_3x4 ();
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
GstCaps *incaps;
guint8 *ayuv;
guint outlength;
+ GstMapInfo map;
incaps = create_caps_rgba32 ();
alphacolor = setup_alphacolor ();
fail_unless_equals_int (gst_element_set_state (alphacolor, GST_STATE_PLAYING),
GST_STATE_CHANGE_SUCCESS);
+ push_caps (create_caps_rgba32 ());
+
inbuffer = create_buffer_rgba32_3x4 ();
- GST_DEBUG ("Created buffer of %d bytes", GST_BUFFER_SIZE (inbuffer));
+ GST_DEBUG ("Created buffer of %d bytes", gst_buffer_get_size (inbuffer));
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away reference */
ASSERT_BUFFER_REFCOUNT (outbuffer, "outbuffer", 1);
outlength = WIDTH * HEIGHT * 4; /* output is AYUV */
- fail_unless_equals_int (GST_BUFFER_SIZE (outbuffer), outlength);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ fail_unless_equals_int (map.size, outlength);
- ayuv = GST_BUFFER_DATA (outbuffer);
+ ayuv = map.data;
/* check alpha values (0x00 = totally transparent, 0xff = totally opaque) */
fail_unless_ayuv_pixel_has_alpha (ayuv, 0, 0, 0xff);
/* we don't check the YUV data, because apparently results differ slightly
* depending on whether we run in valgrind or not */
+ gst_buffer_unmap (outbuffer, &map);
+
buffers = g_list_remove (buffers, outbuffer);
gst_buffer_unref (outbuffer);
{
GstTagList *tags;
- tags = gst_tag_list_new ();
+ tags = gst_tag_list_new_empty ();
if (mask & (1 << 0)) {
gst_tag_list_add (tags, GST_TAG_MERGE_KEEP,
fill_mp3_buffer (GstElement * fakesrc, GstBuffer * buf, GstPad * pad,
guint64 * p_offset)
{
- fail_unless (GST_BUFFER_SIZE (buf) == MP3_FRAME_SIZE);
+ gsize size;
+
+ size = gst_buffer_get_size (buf);
+
+ fail_unless (size == MP3_FRAME_SIZE);
GST_LOG ("filling buffer with fake mp3 data, offset = %" G_GUINT64_FORMAT,
*p_offset);
- memcpy (GST_BUFFER_DATA (buf), mp3_dummyhdr, sizeof (mp3_dummyhdr));
+ gst_buffer_fill (buf, 0, mp3_dummyhdr, sizeof (mp3_dummyhdr));
+#if 0
/* can't use gst_buffer_set_caps() here because the metadata isn't writable
* because of the extra refcounts taken by the signal emission mechanism;
* we know it's fine to use GST_BUFFER_CAPS() here though */
GST_BUFFER_CAPS (buf) = gst_caps_new_simple ("audio/mpeg", "mpegversion",
G_TYPE_INT, 1, "layer", G_TYPE_INT, 3, NULL);
+#endif
GST_BUFFER_OFFSET (buf) = *p_offset;
- *p_offset += GST_BUFFER_SIZE (buf);
+ *p_offset += size;
}
static void
GstBuffer ** p_buf)
{
gint64 off;
- guint size;
+ GstMapInfo map;
off = GST_BUFFER_OFFSET (buf);
- size = GST_BUFFER_SIZE (buf);
- GST_LOG ("got buffer, size=%u, offset=%" G_GINT64_FORMAT, size, off);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ GST_LOG ("got buffer, size=%u, offset=%" G_GINT64_FORMAT, map.size, off);
fail_unless (GST_BUFFER_OFFSET_IS_VALID (buf));
- if (*p_buf == NULL || (off + size) > GST_BUFFER_SIZE (*p_buf)) {
+ if (*p_buf == NULL || (off + map.size) > gst_buffer_get_size (*p_buf)) {
GstBuffer *newbuf;
/* not very elegant, but who cares */
- newbuf = gst_buffer_new_and_alloc (off + size);
+ newbuf = gst_buffer_new_and_alloc (off + map.size);
if (*p_buf) {
- memcpy (GST_BUFFER_DATA (newbuf), GST_BUFFER_DATA (*p_buf),
- GST_BUFFER_SIZE (*p_buf));
+ GstMapInfo pmap;
+
+ gst_buffer_map (*p_buf, &pmap, GST_MAP_READ);
+ gst_buffer_fill (newbuf, 0, pmap.data, pmap.size);
+ gst_buffer_unmap (*p_buf, &pmap);
}
- memcpy (GST_BUFFER_DATA (newbuf) + off, GST_BUFFER_DATA (buf), size);
+ gst_buffer_fill (newbuf, off, map.data, map.size);
if (*p_buf)
gst_buffer_unref (*p_buf);
*p_buf = newbuf;
} else {
- memcpy (GST_BUFFER_DATA (*p_buf) + off, GST_BUFFER_DATA (buf), size);
+ gst_buffer_fill (*p_buf, off, map.data, map.size);
}
-}
-
-static void
-demux_pad_added (GstElement * apedemux, GstPad * srcpad, GstBuffer ** p_outbuf)
-{
- GstElement *fakesink, *pipeline;
-
- GST_LOG ("apedemux added source pad with caps %" GST_PTR_FORMAT,
- GST_PAD_CAPS (srcpad));
-
- pipeline = apedemux;
- while (GST_OBJECT_PARENT (pipeline) != NULL)
- pipeline = (GstElement *) GST_OBJECT_PARENT (pipeline);
-
- fakesink = gst_element_factory_make ("fakesink", "fakesink");
- g_assert (fakesink != NULL);
-
- /* set up sink */
- g_object_set (fakesink, "signal-handoffs", TRUE, NULL);
- g_signal_connect (fakesink, "handoff", G_CALLBACK (got_buffer), p_outbuf);
-
- gst_bin_add (GST_BIN (pipeline), fakesink);
- gst_element_set_state (fakesink, GST_STATE_PLAYING);
-
- fail_unless (gst_element_link (apedemux, fakesink));
+ gst_buffer_unmap (buf, &map);
}
static void
test_taglib_apev2mux_check_output_buffer (GstBuffer * buf)
{
- guint8 *data = GST_BUFFER_DATA (buf);
- guint size = GST_BUFFER_SIZE (buf);
+ GstMapInfo map;
guint off;
- g_assert (size % MP3_FRAME_SIZE == 0);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ g_assert (map.size % MP3_FRAME_SIZE == 0);
- for (off = 0; off < size; off += MP3_FRAME_SIZE) {
- fail_unless (memcmp (data + off, mp3_dummyhdr, sizeof (mp3_dummyhdr)) == 0);
+ for (off = 0; off < map.size; off += MP3_FRAME_SIZE) {
+ fail_unless (memcmp (map.data + off, mp3_dummyhdr,
+ sizeof (mp3_dummyhdr)) == 0);
}
+ gst_buffer_unmap (buf, &map);
}
static void
{
GstMessage *msg;
GstTagList *tags_read = NULL;
- GstElement *pipeline, *apev2mux, *apedemux, *fakesrc;
+ GstElement *pipeline, *apev2mux, *apedemux, *fakesrc, *fakesink;
GstBus *bus;
guint64 offset;
GstBuffer *outbuf = NULL;
apedemux = gst_element_factory_make ("apedemux", "apedemux");
g_assert (apedemux != NULL);
+ fakesink = gst_element_factory_make ("fakesink", "fakesink");
+ g_assert (fakesink != NULL);
+
+ /* set up sink */
outbuf = NULL;
- g_signal_connect (apedemux, "pad-added",
- G_CALLBACK (demux_pad_added), &outbuf);
+ g_object_set (fakesink, "signal-handoffs", TRUE, NULL);
+ g_signal_connect (fakesink, "handoff", G_CALLBACK (got_buffer), &outbuf);
gst_bin_add (GST_BIN (pipeline), fakesrc);
gst_bin_add (GST_BIN (pipeline), apev2mux);
gst_bin_add (GST_BIN (pipeline), apedemux);
+ gst_bin_add (GST_BIN (pipeline), fakesink);
gst_tag_setter_merge_tags (GST_TAG_SETTER (apev2mux), tags,
GST_TAG_MERGE_APPEND);
- gst_element_link_many (fakesrc, apev2mux, apedemux, NULL);
+ gst_element_link_many (fakesrc, apev2mux, apedemux, fakesink, NULL);
/* set up source */
g_object_set (fakesrc, "signal-handoffs", TRUE, "can-activate-pull", FALSE,
#include <gst/video/video.h>
#include <gst/check/gstcheck.h>
-#define ASPECT_RATIO_CROP_CAPS \
- GST_VIDEO_CAPS_RGBx ";" \
- GST_VIDEO_CAPS_xRGB ";" \
- GST_VIDEO_CAPS_BGRx ";" \
- GST_VIDEO_CAPS_xBGR ";" \
- GST_VIDEO_CAPS_RGBA ";" \
- GST_VIDEO_CAPS_ARGB ";" \
- GST_VIDEO_CAPS_BGRA ";" \
- GST_VIDEO_CAPS_ABGR ";" \
- GST_VIDEO_CAPS_RGB ";" \
- GST_VIDEO_CAPS_BGR ";" \
- GST_VIDEO_CAPS_YUV ("AYUV") ";" \
- GST_VIDEO_CAPS_YUV ("YUY2") ";" \
- GST_VIDEO_CAPS_YUV ("YVYU") ";" \
- GST_VIDEO_CAPS_YUV ("UYVY") ";" \
- GST_VIDEO_CAPS_YUV ("Y800") ";" \
- GST_VIDEO_CAPS_YUV ("I420") ";" \
- GST_VIDEO_CAPS_YUV ("YV12") ";" \
- GST_VIDEO_CAPS_RGB_16 ";" \
- GST_VIDEO_CAPS_RGB_15
-
-static GstBuffer *
-make_buffer_with_caps (const gchar * caps_string, int buffer_size)
-{
- GstCaps *caps;
- GstBuffer *temp;
-
- caps = gst_caps_from_string (caps_string);
- temp = gst_buffer_new_and_alloc (buffer_size);
- fail_if (caps == NULL);
- fail_if (temp == NULL);
- gst_buffer_set_caps (temp, caps);
- gst_caps_unref (caps);
-
- return temp;
-}
+#define ASPECT_RATIO_CROP_CAPS \
+ GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR, " \
+ "RGBA, ARGB, BGRA, ABGR, RGB, BGR, AYUV, " \
+ "YUY2, YVYU, UYVY, Y800, I420, YV12, RGB16, " \
+ "RGB15 }")
+
+static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (ASPECT_RATIO_CROP_CAPS)
+ );
static void
check_aspectratiocrop (const gchar * in_string, const gchar * out_string,
GstBuffer *new;
GstBuffer *buffer;
GstBuffer *buffer_out;
- GstCaps *sink_caps;
+ GstCaps *incaps;
+ GstCaps *outcaps;
- buffer = make_buffer_with_caps (in_string, in_size);
- buffer_out = make_buffer_with_caps (out_string, out_size);
+ incaps = gst_caps_from_string (in_string);
+ buffer = gst_buffer_new_and_alloc (in_size);
+ outcaps = gst_caps_from_string (out_string);
+ buffer_out = gst_buffer_new_and_alloc (out_size);
/* check that there are no buffers waiting */
gst_check_drop_buffers ();
/* create the src pad */
src_pad = gst_pad_new (NULL, GST_PAD_SRC);
- gst_pad_set_caps (src_pad, GST_BUFFER_CAPS (buffer));
+ gst_pad_set_active (src_pad, TRUE);
+ GST_DEBUG ("setting caps %s %" GST_PTR_FORMAT, in_string, incaps);
+ fail_unless (gst_pad_set_caps (src_pad, incaps),
+ "Couldn't set input caps %" GST_PTR_FORMAT, incaps);
+
pad_peer = gst_element_get_static_pad (element, "sink");
fail_if (pad_peer == NULL);
fail_unless (gst_pad_link (src_pad, pad_peer) == GST_PAD_LINK_OK,
"Could not link source and %s sink pads", GST_ELEMENT_NAME (element));
gst_object_unref (pad_peer);
- gst_pad_set_active (src_pad, TRUE);
/* create the sink pad */
pad_peer = gst_element_get_static_pad (element, "src");
- sink_caps = gst_caps_from_string (ASPECT_RATIO_CROP_CAPS);
- sink_pad = gst_pad_new (NULL, GST_PAD_SINK);
- GST_PAD_CAPS (sink_pad) = sink_caps;
+ sink_pad = gst_pad_new_from_static_template (&sinktemplate, "sink");
fail_unless (gst_pad_link (pad_peer, sink_pad) == GST_PAD_LINK_OK,
"Could not link sink and %s source pads", GST_ELEMENT_NAME (element));
gst_object_unref (pad_peer);
fail_unless (gst_element_set_state (element,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
+
fail_unless (gst_pad_push (src_pad, buffer) == GST_FLOW_OK,
"Failed to push buffer");
fail_unless (gst_element_set_state (element,
fail_unless (g_list_length (buffers) == 1);
new = GST_BUFFER (buffers->data);
buffers = g_list_remove (buffers, new);
- fail_unless (GST_BUFFER_SIZE (buffer_out) == GST_BUFFER_SIZE (new),
+ fail_unless (gst_buffer_get_size (buffer_out) == gst_buffer_get_size (new),
"size of the buffers are not the same");
- gst_check_caps_equal (GST_BUFFER_CAPS (buffer_out), GST_BUFFER_CAPS (new));
+ {
+ GstCaps *sinkpad_caps;
+
+ sinkpad_caps = gst_pad_get_current_caps (sink_pad);
+
+ gst_check_caps_equal (sinkpad_caps, outcaps);
+
+ gst_caps_unref (sinkpad_caps);
+ }
gst_buffer_unref (new);
gst_buffer_unref (buffer_out);
+ gst_caps_unref (outcaps);
+ gst_caps_unref (incaps);
/* teardown the element and pads */
gst_pad_set_active (src_pad, FALSE);
GST_START_TEST (test_no_cropping)
{
check_aspectratiocrop
- ("video/x-raw-yuv, format=(fourcc)YUY2, width=(int)320, height=(int)240, framerate=(fraction)30/1",
- "video/x-raw-yuv, format=(fourcc)YUY2, width=(int)320, height=(int)240, framerate=(fraction)30/1",
+ ("video/x-raw, format=(string)YUY2, width=(int)320, height=(int)240, framerate=(fraction)30/1",
+ "video/x-raw, format=(string)YUY2, width=(int)320, height=(int)240, framerate=(fraction)30/1",
153600, 153600, 4, 3);
check_aspectratiocrop
- ("video/x-raw-yuv, format=(fourcc)YUY2, width=(int)320, height=(int)320, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)4/3",
- "video/x-raw-yuv, format=(fourcc)YUY2, width=(int)320, height=(int)320, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)4/3",
+ ("video/x-raw, format=(string)YUY2, width=(int)320, height=(int)320, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)4/3",
+ "video/x-raw, format=(string)YUY2, width=(int)320, height=(int)320, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)4/3",
204800, 204800, 4, 3);
}
GST_START_TEST (test_autocropping)
{
check_aspectratiocrop
- ("video/x-raw-yuv, format=(fourcc)YUY2, width=(int)320, height=(int)240, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)4/3",
- "video/x-raw-yuv, format=(fourcc)YUY2, width=(int)240, height=(int)240, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)4/3",
+ ("video/x-raw, format=(string)YUY2, width=(int)320, height=(int)240, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)4/3",
+ "video/x-raw, format=(string)YUY2, width=(int)240, height=(int)240, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)4/3",
153600, 115200, 4, 3);
check_aspectratiocrop
- ("video/x-raw-yuv, format=(fourcc)YUY2, width=(int)320, height=(int)240, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)16/9",
- "video/x-raw-yuv, format=(fourcc)YUY2, width=(int)180, height=(int)240, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)16/9",
+ ("video/x-raw, format=(string)YUY2, width=(int)320, height=(int)240, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)16/9",
+ "video/x-raw, format=(string)YUY2, width=(int)180, height=(int)240, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)16/9",
153600, 86400, 4, 3);
check_aspectratiocrop
- ("video/x-raw-yuv, format=(fourcc)YUY2, width=(int)320, height=(int)240, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)16/15",
- "video/x-raw-yuv, format=(fourcc)YUY2, width=(int)320, height=(int)192, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)16/15",
+ ("video/x-raw, format=(string)YUY2, width=(int)320, height=(int)240, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)16/15",
+ "video/x-raw, format=(string)YUY2, width=(int)320, height=(int)192, framerate=(fraction)30/1, pixel-aspect-ratio=(fraction)16/15",
153600, 122880, 16, 9);
}
#include <gst/base/gstbasetransform.h>
#include <gst/check/gstcheck.h>
+#include <gst/audio/audio.h>
gboolean have_eos = FALSE;
#define AMPLIFY_CAPS_STRING \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
"channels = (int) 1, " \
"rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "signed = (bool) TRUE"
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(S16)
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 1, "
"rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE")
- );
+ "layout = (string) interleaved, "
+ "format = (string) " GST_AUDIO_NE (S16)));
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 1, "
"rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE")
- );
+ "layout = (string) interleaved, "
+ "format = (string) " GST_AUDIO_NE (S16)));
static GstElement *
setup_amplify (void)
GST_DEBUG ("setup_amplify");
amplify = gst_check_setup_element ("audioamplify");
- mysrcpad = gst_check_setup_src_pad (amplify, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (amplify, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (amplify, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (amplify, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint16 in[6] = { 24576, -16384, 256, -128, 0, -24576 };
- gint16 *res;
+ gint16 res[6];
amplify = setup_amplify ();
fail_unless (gst_element_set_state (amplify,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (12);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 12);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 12) == 0);
caps = gst_caps_from_string (AMPLIFY_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 12);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 12) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 12) == 12);
GST_INFO
("expected %+5d %+5d %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d %+5d %+5d",
in[0], in[1], in[2], in[3], in[4], in[5], res[0], res[1], res[2], res[3],
res[4], res[5]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), in, 12) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, in, 12) == 0);
/* cleanup */
cleanup_amplify (amplify);
GstCaps *caps;
gint16 in[6] = { 24576, -16384, 256, -128, 0, -24576 };
gint16 out[6] = { 0, 0, 0, 0, 0, 0 };
- gint16 *res;
+ gint16 res[6];
amplify = setup_amplify ();
g_object_set (G_OBJECT (amplify), "amplification", 0.0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (12);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 12);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 12) == 0);
caps = gst_caps_from_string (AMPLIFY_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 12);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 12) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 12) == 12);
GST_INFO
("expected %+5d %+5d %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], out[4], out[5], res[0], res[1], res[2],
res[3], res[4], res[5]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 12) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 12) == 0);
/* cleanup */
cleanup_amplify (amplify);
GstCaps *caps;
gint16 in[6] = { 24576, -16384, 256, -128, 0, -24576 };
gint16 out[6] = { 12288, -8192, 128, -64, 0, -12288 };
- gint16 *res;
+ gint16 res[6];
amplify = setup_amplify ();
g_object_set (G_OBJECT (amplify), "amplification", 0.5, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (12);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 12);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 12) == 0);
caps = gst_caps_from_string (AMPLIFY_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 12);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 12) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 12) == 12);
GST_INFO
("expected %+5d %+5d %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], out[4], out[5], res[0], res[1], res[2],
res[3], res[4], res[5]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 12) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 12) == 0);
/* cleanup */
cleanup_amplify (amplify);
GstCaps *caps;
gint16 in[6] = { 24576, -16384, 256, -128, 0, -24576 };
gint16 out[6] = { G_MAXINT16, -32768, 512, -256, 0, G_MININT16 };
- gint16 *res;
+ gint16 res[6];
amplify = setup_amplify ();
g_object_set (G_OBJECT (amplify), "amplification", 2.0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (12);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 12);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 12) == 0);
caps = gst_caps_from_string (AMPLIFY_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 12);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 12) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 12) == 12);
GST_INFO
("expected %+5d %+5d %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], out[4], out[5], res[0], res[1], res[2],
res[3], res[4], res[5]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 12) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 12) == 0);
/* cleanup */
cleanup_amplify (amplify);
GstCaps *caps;
gint16 in[6] = { 24576, -16384, 256, -128, 0, -24576 };
gint16 out[6] = { 12288, -8192, 128, -64, 0, -12288 };
- gint16 *res;
+ gint16 res[6];
amplify = setup_amplify ();
g_object_set (G_OBJECT (amplify), "amplification", 0.5, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (12);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 12);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 12) == 0);
caps = gst_caps_from_string (AMPLIFY_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 12);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 12) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 12) == 12);
GST_INFO
("expected %+5d %+5d %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], out[4], out[5], res[0], res[1], res[2],
res[3], res[4], res[5]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 12) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 12) == 0);
/* cleanup */
cleanup_amplify (amplify);
GstCaps *caps;
gint16 in[6] = { 24576, -16384, 256, -128, 0, -24576 };
gint16 out[6] = { -16384, -32768, 512, -256, 0, 16384 };
- gint16 *res;
+ gint16 res[6];
amplify = setup_amplify ();
g_object_set (G_OBJECT (amplify), "amplification", 2.0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (12);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 12);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 12) == 0);
caps = gst_caps_from_string (AMPLIFY_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 12);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 12) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 12) == 12);
GST_INFO
("expected %+5d %+5d %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], out[4], out[5], res[0], res[1], res[2],
res[3], res[4], res[5]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 12) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 12) == 0);
/* cleanup */
cleanup_amplify (amplify);
GstCaps *caps;
gint16 in[6] = { 24576, -16384, 256, -128, 0, -24576 };
gint16 out[6] = { 12288, -8192, 128, -64, 0, -12288 };
- gint16 *res;
+ gint16 res[6];
amplify = setup_amplify ();
g_object_set (G_OBJECT (amplify), "amplification", 0.5, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (12);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 12);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 12) == 0);
caps = gst_caps_from_string (AMPLIFY_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 12);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 12) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 12) == 12);
GST_INFO
("expected %+5d %+5d %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], out[4], out[5], res[0], res[1], res[2],
res[3], res[4], res[5]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 12) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 12) == 0);
/* cleanup */
cleanup_amplify (amplify);
GstCaps *caps;
gint16 in[6] = { 24576, -16384, 256, -128, 0, -24576 };
gint16 out[6] = { 16382, -32768, 512, -256, 0, -16384 };
- gint16 *res;
+ gint16 res[6];
amplify = setup_amplify ();
g_object_set (G_OBJECT (amplify), "amplification", 2.0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (12);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 12);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 12) == 0);
caps = gst_caps_from_string (AMPLIFY_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 12);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 12) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 12) == 12);
GST_INFO
("expected %+5d %+5d %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], out[4], out[5], res[0], res[1], res[2],
res[3], res[4], res[5]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 12) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 12) == 0);
/* cleanup */
cleanup_amplify (amplify);
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
#include <gst/check/gstcheck.h>
#include <math.h>
GstPad *mysrcpad, *mysinkpad;
#define BUFFER_CAPS_STRING_32 \
- "audio/x-raw-float, " \
+ "audio/x-raw, " \
"channels = (int) 1, " \
"rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 32" \
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(F32)
#define BUFFER_CAPS_STRING_64 \
- "audio/x-raw-float, " \
+ "audio/x-raw, " \
"channels = (int) 1, " \
"rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 64" \
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(F64)
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 1, "
"rate = (int) 44100, "
- "endianness = (int) BYTE_ORDER, " "width = (int) { 32, 64 }")
- );
+ "layout = (string) interleaved, "
+ "format = (string) { "
+ GST_AUDIO_NE (F32) ", " GST_AUDIO_NE (F64) " }"));
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 1, "
"rate = (int) 44100, "
- "endianness = (int) BYTE_ORDER, " "width = (int) { 32, 64 }")
- );
+ "layout = (string) interleaved, "
+ "format = (string) { "
+ GST_AUDIO_NE (F32) ", " GST_AUDIO_NE (F64) " }"));
static GstElement *
setup_audiochebband (void)
GST_DEBUG ("setup_audiochebband");
audiochebband = gst_check_setup_element ("audiochebband");
- mysrcpad = gst_check_setup_src_pad (audiochebband, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (audiochebband, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (audiochebband, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (audiochebband, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 1] = 1.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.6);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 1] = 1.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 1] = 1.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.6);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 1] = 1.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 1] = 1.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.6);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 1] = 1.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
44100 / 4.0 - 1000, NULL);
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
- inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 1024 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 1] = 1.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.6);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandpass */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 1] = 1.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiochebband = setup_audiochebband ();
/* Set to bandreject */
g_object_set (G_OBJECT (audiochebband), "upper-frequency",
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 1024; i++)
rms = sqrt (rms / 1024.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiochebband (audiochebband);
}
*/
#include <gst/gst.h>
+#include <gst/audio/audio.h>
#include <gst/base/gstbasetransform.h>
#include <gst/check/gstcheck.h>
GstPad *mysrcpad, *mysinkpad;
#define BUFFER_CAPS_STRING_32 \
- "audio/x-raw-float, " \
+ "audio/x-raw, " \
"channels = (int) 1, " \
"rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 32" \
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(F32)
#define BUFFER_CAPS_STRING_64 \
- "audio/x-raw-float, " \
+ "audio/x-raw, " \
"channels = (int) 1, " \
"rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 64" \
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(F64)
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 1, "
"rate = (int) 44100, "
- "endianness = (int) BYTE_ORDER, " "width = (int) { 32, 64 }")
- );
+ "layout = (string) interleaved, "
+ "format = (string) { "
+ GST_AUDIO_NE (F32) ", " GST_AUDIO_NE (F64) " }"));
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 1, "
"rate = (int) 44100, "
- "endianness = (int) BYTE_ORDER, " "width = (int) { 32, 64 }")
- );
+ "layout = (string) interleaved, "
+ "format = (string) { "
+ GST_AUDIO_NE (F32) ", " GST_AUDIO_NE (F64) " }"));
static GstElement *
setup_audiocheblimit (void)
GST_DEBUG ("setup_audiocheblimit");
audiocheblimit = gst_check_setup_element ("audiocheblimit");
- mysrcpad = gst_check_setup_src_pad (audiocheblimit, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (audiocheblimit, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (audiocheblimit, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (audiocheblimit, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to lowpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to lowpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to highpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to highpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to lowpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gdouble), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to lowpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gdouble), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to highpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gdouble), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to highpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gdouble), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to lowpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to lowpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to highpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to highpass */
"could not set to playing");
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
- inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 128 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to lowpass */
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to lowpass */
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to highpass */
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms <= 0.1);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
audiocheblimit = setup_audiocheblimit ();
/* Set to highpass */
g_object_set (G_OBJECT (audiocheblimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (BUFFER_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms = 0.0;
for (i = 0; i < 128; i++)
rms = sqrt (rms / 128.0);
fail_unless (rms >= 0.9);
+ gst_buffer_unmap (outbuffer, &map);
+
/* cleanup */
cleanup_audiocheblimit (audiocheblimit);
}
#include <unistd.h>
+#include <gst/audio/audio.h>
#include <gst/base/gstbasetransform.h>
#include <gst/check/gstcheck.h>
#define DYNAMIC_CAPS_STRING \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
"channels = (int) 1, " \
"rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "signed = (bool) TRUE"
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(S16)
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 1, "
"rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE")
- );
+ "layout = (string) interleaved, "
+ "format = (string)" GST_AUDIO_NE (S16)));
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 1, "
"rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE")
- );
+ "layout = (string) interleaved, "
+ "format = (string) " GST_AUDIO_NE (S16)));
static GstElement *
setup_dynamic (void)
GST_DEBUG ("setup_dynamic");
dynamic = gst_check_setup_element ("audiodynamic");
- mysrcpad = gst_check_setup_src_pad (dynamic, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (dynamic, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (dynamic, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (dynamic, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint16 in[6] = { 24576, -16384, 256, -128, 0, -24576 };
- gint16 *res;
+ gint16 res[6];
dynamic = setup_dynamic ();
fail_unless (gst_element_set_state (dynamic,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (12);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 12);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 12) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 12);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 12) == 0);
caps = gst_caps_from_string (DYNAMIC_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 12) == 12);
GST_INFO
("expected %+5d %+5d %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d %+5d %+5d",
in[0], in[1], in[2], in[3], in[4], in[5], res[0], res[1], res[2], res[3],
res[4], res[5]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), in, 12) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, in, 12) == 0);
/* cleanup */
cleanup_dynamic (dynamic);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint16 in[8] = { -30000, 24576, -16384, 256, -128, 0, -24576, 30000 };
- gint16 *res;
+ gint16 res[8];
dynamic = setup_dynamic ();
g_object_set (G_OBJECT (dynamic), "mode", 0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (16);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 16);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 16) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 16);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 16) == 0);
caps = gst_caps_from_string (DYNAMIC_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 16) == 16);
fail_unless (res[0] > in[0]);
fail_unless (res[1] < in[1]);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint16 in[8] = { -30000, 24576, -16384, 256, -128, 0, -24576, 30000 };
- gint16 *res;
+ gint16 res[8];
dynamic = setup_dynamic ();
g_object_set (G_OBJECT (dynamic), "mode", 0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (16);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 16);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 16) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 16);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 16) == 0);
caps = gst_caps_from_string (DYNAMIC_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 16) == 16);
fail_unless (res[0] > in[0]);
fail_unless (res[1] < in[1]);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint16 in[8] = { -30000, 24576, -16384, 256, -128, 0, -24576, 30000 };
- gint16 *res;
+ gint16 res[8];
dynamic = setup_dynamic ();
g_object_set (G_OBJECT (dynamic), "mode", 0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (16);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 16);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 16) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 16);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 16) == 0);
caps = gst_caps_from_string (DYNAMIC_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 16) == 16);
fail_unless (res[0] == in[0]);
fail_unless (res[1] == in[1]);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint16 in[8] = { -30000, 24576, -16383, 256, -128, 0, -24576, 30000 };
- gint16 *res;
+ gint16 res[8];
dynamic = setup_dynamic ();
g_object_set (G_OBJECT (dynamic), "mode", 1, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (16);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 16);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 16) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 16);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 16) == 0);
caps = gst_caps_from_string (DYNAMIC_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 16) == 16);
fail_unless (res[0] == in[0]);
fail_unless (res[1] == in[1]);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint16 in[8] = { -30000, 24576, -16383, 256, -128, 0, -24576, 30000 };
- gint16 *res;
+ gint16 res[8];
dynamic = setup_dynamic ();
g_object_set (G_OBJECT (dynamic), "mode", 1, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (16);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 16);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 16) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 16);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 16) == 0);
caps = gst_caps_from_string (DYNAMIC_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 16) == 16);
fail_unless (res[0] == in[0]);
fail_unless (res[1] == in[1]);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint16 in[8] = { -30000, 24576, -16383, 256, -128, 0, -24576, 30000 };
- gint16 *res;
+ gint16 res[8];
dynamic = setup_dynamic ();
g_object_set (G_OBJECT (dynamic), "mode", 1, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (16);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 16);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 16) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 16);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 16) == 0);
caps = gst_caps_from_string (DYNAMIC_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 16) == 16);
fail_unless (res[0] == in[0]);
fail_unless (res[1] == in[1]);
*/
#include <gst/check/gstcheck.h>
+#include <gst/audio/audio.h>
gboolean have_eos = FALSE;
GstPad *mysrcpad, *mysinkpad;
#define ECHO_CAPS_STRING \
- "audio/x-raw-float, " \
+ "audio/x-raw, " \
"channels = (int) 2, " \
+ "channel-mask = (bitmask) 3, " \
"rate = (int) 100000, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 64"
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(F64)
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) [ 1, 2 ], "
"rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, " "width = (int) { 32, 64 }")
- );
+ "format = (string) { "
+ GST_AUDIO_NE (F32) ", " GST_AUDIO_NE (F64) " }"));
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) [ 1, 2 ], "
"rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, " "width = (int) { 32, 64 }")
- );
+ "format = (string) { "
+ GST_AUDIO_NE (F32) ", " GST_AUDIO_NE (F64) " }"));
static GstElement *
setup_echo (void)
GST_DEBUG ("setup_echo");
echo = gst_check_setup_element ("audioecho");
- mysrcpad = gst_check_setup_src_pad (echo, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (echo, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (echo, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (echo, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gdouble in[] = { 1.0, -1.0, 0.0, 0.5, -0.5, 0.0 };
- gdouble *res;
+ gdouble res[6];
echo = setup_echo ();
g_object_set (G_OBJECT (echo), "delay", (GstClockTime) 1, "intensity", 0.0,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (sizeof (in));
- memcpy (GST_BUFFER_DATA (inbuffer), in, sizeof (in));
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, sizeof (in)) == 0);
caps = gst_caps_from_string (ECHO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, sizeof (in));
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, sizeof (in)) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res,
+ sizeof (res)) == sizeof (res));
GST_INFO
("expected %+lf %+lf %+lf %+lf %+lf %+lf real %+lf %+lf %+lf %+lf %+lf %+lf",
in[0], in[1], in[2], in[3], in[4], in[5], res[0], res[1], res[2], res[3],
res[4], res[5]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), in, sizeof (in)) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, in, sizeof (in)) == 0);
/* cleanup */
cleanup_echo (echo);
GstCaps *caps;
gdouble in[] = { 1.0, -1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, };
gdouble out[] = { 1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 0.0, 0.0, 0.0 };
- gdouble *res;
+ gdouble res[10];
echo = setup_echo ();
g_object_set (G_OBJECT (echo), "delay", (GstClockTime) 20000, "intensity",
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (sizeof (in));
- memcpy (GST_BUFFER_DATA (inbuffer), in, sizeof (in));
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, sizeof (in)) == 0);
caps = gst_caps_from_string (ECHO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, sizeof (in));
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, sizeof (in)) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res,
+ sizeof (res)) == sizeof (res));
GST_INFO
("expected %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf real %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf",
out[0], out[1], out[2], out[3], out[4], out[5], out[6], out[7], out[8],
out[9], res[0], res[1], res[2], res[3], res[4], res[5], res[6], res[7],
res[8], res[9]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, sizeof (out)) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, sizeof (out)) == 0);
/* cleanup */
cleanup_echo (echo);
GstCaps *caps;
gdouble in[] = { 1.0, -1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, };
gdouble out[] = { 1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 0.0, 1.0, -1.0 };
- gdouble *res;
+ gdouble res[10];
echo = setup_echo ();
g_object_set (G_OBJECT (echo), "delay", (GstClockTime) 20000, "intensity",
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (sizeof (in));
- memcpy (GST_BUFFER_DATA (inbuffer), in, sizeof (in));
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, sizeof (in)) == 0);
caps = gst_caps_from_string (ECHO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, sizeof (in));
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, sizeof (in)) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res,
+ sizeof (res)) == sizeof (res));
GST_INFO
("expected %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf real %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf %+lf",
out[0], out[1], out[2], out[3], out[4], out[5], out[6], out[7], out[8],
out[9], res[0], res[1], res[2], res[3], res[4], res[5], res[6], res[7],
res[8], res[9]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, sizeof (out)) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, sizeof (out)) == 0);
/* cleanup */
cleanup_echo (echo);
* 02110-1301 USA
*/
+/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
#include <gst/gst.h>
#include <gst/check/gstcheck.h>
gpointer user_data)
{
if (!have_data) {
- gdouble *data = (gdouble *) GST_BUFFER_DATA (buffer);
+ GstMapInfo map;
+ gdouble *data;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = (gdouble *) map.data;
- fail_unless (GST_BUFFER_SIZE (buffer) > 5 * sizeof (gdouble));
+ fail_unless (map.size > 5 * sizeof (gdouble));
fail_unless (data[0] == 0.0);
fail_unless (data[1] == 0.0);
fail_unless (data[2] == 0.0);
fail_unless (data[3] == 0.0);
fail_unless (data[4] == 0.0);
fail_unless (data[5] != 0.0);
+
+ gst_buffer_unmap (buffer, &map);
have_data = TRUE;
}
}
* 02110-1301 USA
*/
+/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
#include <gst/gst.h>
#include <gst/check/gstcheck.h>
g_value_array_append (va, &v);
g_value_reset (&v);
- g_object_set (G_OBJECT (element), "a", va, NULL);
+ g_object_set (G_OBJECT (element), "b", va, NULL);
g_value_array_free (va);
va = g_value_array_new (6);
- g_value_set_double (&v, 0.0);
+ g_value_set_double (&v, 1.0);
g_value_array_append (va, &v);
g_value_reset (&v);
- g_object_set (G_OBJECT (element), "b", va, NULL);
+ g_object_set (G_OBJECT (element), "a", va, NULL);
g_value_array_free (va);
}
gpointer user_data)
{
if (!have_data) {
- gdouble *data = (gdouble *) GST_BUFFER_DATA (buffer);
+ GstMapInfo map;
+ gfloat *data;
- fail_unless (GST_BUFFER_SIZE (buffer) > 5 * sizeof (gdouble));
+ fail_unless (gst_buffer_map (buffer, &map, GST_MAP_READ));
+ data = (gfloat *) map.data;
+
+ fail_unless (map.size > 5 * sizeof (gdouble));
fail_unless (data[0] == 0.0);
fail_unless (data[1] == 0.0);
fail_unless (data[2] == 0.0);
fail_unless (data[3] == 0.0);
fail_unless (data[4] == 0.0);
fail_unless (data[5] != 0.0);
+
+ gst_buffer_unmap (buffer, &map);
have_data = TRUE;
}
}
#include <unistd.h>
+#include <gst/audio/audio.h>
#include <gst/base/gstbasetransform.h>
#include <gst/check/gstcheck.h>
#define INVERT_CAPS_STRING \
- "audio/x-raw-int, " \
- "channels = (int) 1, " \
- "rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "signed = (bool) TRUE"
+ "audio/x-raw, " \
+ "format = (string) "GST_AUDIO_NE(S16)", " \
+ "layout = (string) interleaved, " \
+ "channels = (int) 1, " \
+ "rate = (int) 44100"
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "channels = (int) 1, "
- "rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "layout = (string) interleaved, "
+ "channels = (int) 1, " "rate = (int) [ 1, MAX ]")
);
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "channels = (int) 1, "
- "rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "layout = (string) interleaved, "
+ "channels = (int) 1, " "rate = (int) [ 1, MAX ]")
);
static GstElement *
GST_DEBUG ("setup_invert");
invert = gst_check_setup_element ("audioinvert");
- mysrcpad = gst_check_setup_src_pad (invert, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (invert, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (invert, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (invert, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstCaps *caps;
gint16 in[4] = { 16384, -256, 128, -512 };
gint16 *res;
+ GstMapInfo map;
invert = setup_invert ();
fail_unless (gst_element_set_state (invert,
"could not set to playing");
inbuffer = gst_buffer_new_and_alloc (8);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 8);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 8) == 0);
+ gst_buffer_fill (inbuffer, 0, in, 8);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 8) == 0);
caps = gst_caps_from_string (INVERT_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gint16 *) map.data;
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
in[0], in[1], in[2], in[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), in, 8) == 0);
+ gst_buffer_unmap (outbuffer, &map);
+
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, in, 8) == 0);
/* cleanup */
cleanup_invert (invert);
gint16 in[4] = { 16384, -256, 128, -512 };
gint16 out[4] = { 0, 0, 0, 0 };
gint16 *res;
+ GstMapInfo map;
invert = setup_invert ();
g_object_set (G_OBJECT (invert), "degree", 0.5, NULL);
"could not set to playing");
inbuffer = gst_buffer_new_and_alloc (8);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 8);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 8) == 0);
+ gst_buffer_fill (inbuffer, 0, in, 8);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 8) == 0);
caps = gst_caps_from_string (INVERT_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gint16 *) map.data;
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ gst_buffer_unmap (outbuffer, &map);
+
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_invert (invert);
gint16 in[4] = { 16384, -256, 128, -512 };
gint16 out[4] = { -16385, 255, -129, 511 };
gint16 *res;
+ GstMapInfo map;
invert = setup_invert ();
g_object_set (G_OBJECT (invert), "degree", 1.0, NULL);
"could not set to playing");
inbuffer = gst_buffer_new_and_alloc (8);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 8);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 8) == 0);
+ gst_buffer_fill (inbuffer, 0, in, 8);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 8) == 0);
caps = gst_caps_from_string (INVERT_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gint16 *) map.data;
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ gst_buffer_unmap (outbuffer, &map);
+
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_invert (invert);
gint16 in[4] = { 16384, -256, 128, -512 };
gint16 out[4] = { 8191, -128, 63, -256 };
gint16 *res;
+ GstMapInfo map;
invert = setup_invert ();
g_object_set (G_OBJECT (invert), "degree", 0.25, NULL);
"could not set to playing");
inbuffer = gst_buffer_new_and_alloc (8);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 8);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 8) == 0);
+ gst_buffer_fill (inbuffer, 0, in, 8);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 8) == 0);
caps = gst_caps_from_string (INVERT_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gint16 *) map.data;
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ gst_buffer_unmap (outbuffer, &map);
+
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_invert (invert);
#include <unistd.h>
+#include <gst/audio/audio.h>
#include <gst/base/gstbasetransform.h>
#include <gst/check/gstcheck.h>
#define PANORAMA_MONO_CAPS_STRING \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
"channels = (int) 1, " \
"rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "signed = (bool) TRUE"
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(S16)
#define PANORAMA_STEREO_CAPS_STRING \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
"channels = (int) 2, " \
+ "channel-mask = (bitmask) 3, " \
"rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "signed = (bool) TRUE"
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(S16)
#define PANORAMA_WRONG_CAPS_STRING \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
"channels = (int) 5, " \
"rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "signed = (bool) FALSE"
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(U16)
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 2, "
"rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE")
+ "layout = (string) interleaved, "
+ "format = (string) " GST_AUDIO_NE (S16))
);
static GstStaticPadTemplate msrctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 1, "
"rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE")
+ "layout = (string) interleaved, "
+ "format = (string) " GST_AUDIO_NE (S16))
);
static GstStaticPadTemplate ssrctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
+ GST_STATIC_CAPS ("audio/x-raw, "
"channels = (int) 2, "
"rate = (int) [ 1, MAX ], "
- "endianness = (int) BYTE_ORDER, "
- "width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE")
+ "layout = (string) interleaved, "
+ "format = (string) " GST_AUDIO_NE (S16))
);
static GstElement *
GST_DEBUG ("setup_panorama");
panorama = gst_check_setup_element ("audiopanorama");
- mysrcpad = gst_check_setup_src_pad (panorama, &msrctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (panorama, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (panorama, &msrctemplate);
+ mysinkpad = gst_check_setup_sink_pad (panorama, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GST_DEBUG ("setup_panorama");
panorama = gst_check_setup_element ("audiopanorama");
- mysrcpad = gst_check_setup_src_pad (panorama, &ssrctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (panorama, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (panorama, &ssrctemplate);
+ mysinkpad = gst_check_setup_sink_pad (panorama, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstCaps *caps;
gint16 in[2] = { 16384, -256 };
gint16 out[4] = { 8192, 8192, -128, -128 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_m ();
fail_unless (gst_element_set_state (panorama,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (4);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 4);
caps = gst_caps_from_string (PANORAMA_MONO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 4);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_panorama (panorama);
GstCaps *caps;
gint16 in[2] = { 16384, -256 };
gint16 out[4] = { 16384, 0, -256, 0 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_m ();
g_object_set (G_OBJECT (panorama), "panorama", -1.0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (4);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 4);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 4) == 0);
caps = gst_caps_from_string (PANORAMA_MONO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 4);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 4) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_panorama (panorama);
GstCaps *caps;
gint16 in[2] = { 16384, -256 };
gint16 out[4] = { 0, 16384, 0, -256 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_m ();
g_object_set (G_OBJECT (panorama), "panorama", 1.0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (4);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 4);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 4) == 0);
caps = gst_caps_from_string (PANORAMA_MONO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 4);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 4) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_panorama (panorama);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint16 in[4] = { 16384, -256, 8192, 128 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_s ();
fail_unless (gst_element_set_state (panorama,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (8);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 8);
caps = gst_caps_from_string (PANORAMA_STEREO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
- ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
- /* pushing gives away my reference ... so keep an extra one */
- gst_buffer_ref (inbuffer);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 8);
+ ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
+ /* pushing gives away my reference ... */
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
/* ... but it ends up being collected on the global buffer list */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (inbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
in[0], in[1], in[2], in[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), in, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, in, 8) == 0);
/* cleanup */
- gst_buffer_unref (inbuffer);
cleanup_panorama (panorama);
}
GstCaps *caps;
gint16 in[4] = { 16384, -256, 8192, 128 };
gint16 out[4] = { 16384 - 256, 0, 8192 + 128, 0 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_s ();
g_object_set (G_OBJECT (panorama), "panorama", -1.0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (8);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 8);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 8) == 0);
caps = gst_caps_from_string (PANORAMA_STEREO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 8);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 8) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_panorama (panorama);
GstCaps *caps;
gint16 in[4] = { 16384, -256, 8192, 128 };
gint16 out[4] = { 0, -256 + 16384, 0, 128 + 8192 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_s ();
g_object_set (G_OBJECT (panorama), "panorama", 1.0, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (8);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 8);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 8) == 0);
caps = gst_caps_from_string (PANORAMA_STEREO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 8);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 8) == 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* pushing gives away my reference ... */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8));
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_panorama (panorama);
GstCaps *caps;
gint16 in[2] = { 16384, -256 };
gint16 out[4] = { 16384, 16384, -256, -256 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_m ();
g_object_set (G_OBJECT (panorama), "method", 1, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (4);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 4);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 4);
caps = gst_caps_from_string (PANORAMA_MONO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_panorama (panorama);
GstCaps *caps;
gint16 in[2] = { 16384, -256 };
gint16 out[4] = { 16384, 0, -256, 0 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_m ();
g_object_set (G_OBJECT (panorama), "method", 1, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (4);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 4);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 4) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 4);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 4) == 0);
caps = gst_caps_from_string (PANORAMA_MONO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_panorama (panorama);
GstCaps *caps;
gint16 in[2] = { 16384, -256 };
gint16 out[4] = { 0, 16384, 0, -256 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_m ();
g_object_set (G_OBJECT (panorama), "method", 1, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (4);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 4);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 4) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 4);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 4) == 0);
caps = gst_caps_from_string (PANORAMA_MONO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_panorama (panorama);
GstBuffer *inbuffer, *outbuffer;
GstCaps *caps;
gint16 in[4] = { 16384, -256, 8192, 128 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_s ();
g_object_set (G_OBJECT (panorama), "method", 1, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (8);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 8);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 8);
caps = gst_caps_from_string (PANORAMA_STEREO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
- /* pushing gives away my reference ... so keep an extra one */
- gst_buffer_ref (inbuffer);
-
+ /* pushing gives away my reference ... */
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
/* ... but it ends up being collected on the global buffer list */
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (inbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
in[0], in[1], in[2], in[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), in, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, in, 8) == 0);
/* cleanup */
- gst_buffer_unref (inbuffer);
cleanup_panorama (panorama);
}
GstCaps *caps;
gint16 in[4] = { 16384, -256, 8192, 128 };
gint16 out[4] = { 16384, 0, 8192, 0 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_s ();
g_object_set (G_OBJECT (panorama), "method", 1, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (8);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 8);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 8) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 8);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 8) == 0);
caps = gst_caps_from_string (PANORAMA_STEREO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_panorama (panorama);
GstCaps *caps;
gint16 in[4] = { 16384, -256, 8192, 128 };
gint16 out[4] = { 0, -256, 0, 128 };
- gint16 *res;
+ gint16 res[4];
panorama = setup_panorama_s ();
g_object_set (G_OBJECT (panorama), "method", 1, NULL);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (8);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 8);
- fail_unless (memcmp (GST_BUFFER_DATA (inbuffer), in, 8) == 0);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 8);
+ fail_unless (gst_buffer_memcmp (inbuffer, 0, in, 8) == 0);
caps = gst_caps_from_string (PANORAMA_STEREO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (g_list_length (buffers), 1);
fail_if ((outbuffer = (GstBuffer *) buffers->data) == NULL);
- res = (gint16 *) GST_BUFFER_DATA (outbuffer);
+ fail_unless (gst_buffer_extract (outbuffer, 0, res, 8) == 8);
GST_INFO ("expected %+5d %+5d %+5d %+5d real %+5d %+5d %+5d %+5d",
out[0], out[1], out[2], out[3], res[0], res[1], res[2], res[3]);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), out, 8) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, out, 8) == 0);
/* cleanup */
cleanup_panorama (panorama);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (4);
- memcpy (GST_BUFFER_DATA (inbuffer), in, 4);
- caps = gst_caps_from_string (PANORAMA_WRONG_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
- gst_caps_unref (caps);
+ inbuffer = gst_buffer_new_wrapped_full (in, NULL, 0, 4);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
gst_buffer_ref (inbuffer);
/* set a bus here so we avoid getting state change messages */
gst_element_set_bus (panorama, bus);
+ caps = gst_caps_from_string (PANORAMA_WRONG_CAPS_STRING);
+ /* this actually succeeds, because the caps event is sticky */
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
+ gst_caps_unref (caps);
+
/* pushing gives an error because it can't negotiate with wrong caps */
fail_unless_equals_int (gst_pad_push (mysrcpad, inbuffer),
GST_FLOW_NOT_NEGOTIATED);
*/
#include <gst/gst.h>
+#include <gst/audio/audio.h>
#include <gst/base/gstbasetransform.h>
#include <gst/check/gstcheck.h>
GstPad *mysrcpad, *mysinkpad;
#define AUDIO_WSINC_BAND_CAPS_STRING_32 \
- "audio/x-raw-float, " \
- "channels = (int) 1, " \
- "rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 32" \
+ "audio/x-raw, " \
+ "format = (string) " GST_AUDIO_NE (F32) ", " \
+ "layout = (string) interleaved, " \
+ "channels = (int) 1, " \
+ "rate = (int) 44100"
#define AUDIO_WSINC_BAND_CAPS_STRING_64 \
- "audio/x-raw-float, " \
- "channels = (int) 1, " \
- "rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 64" \
+ "audio/x-raw, " \
+ "format = (string) " GST_AUDIO_NE (F64) ", " \
+ "layout = (string) interleaved, " \
+ "channels = (int) 1, " \
+ "rate = (int) 44100"
+
+#define FORMATS "{ "GST_AUDIO_NE (F32)","GST_AUDIO_NE (F64)" }"
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "channels = (int) 1, "
- "rate = (int) 44100, "
- "endianness = (int) BYTE_ORDER, " "width = (int) { 32, 64 } ")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMATS ", "
+ "layout = (string) interleaved, "
+ "channels = (int) 1, " "rate = (int) 44100")
);
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "channels = (int) 1, "
- "rate = (int) 44100, "
- "endianness = (int) BYTE_ORDER, " "width = (int) { 32, 64 } ")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMATS ", "
+ "layout = (string) interleaved, "
+ "channels = (int) 1, " "rate = (int) 44100")
);
static GstElement *
GST_DEBUG ("setup_audiowsincband");
audiowsincband = gst_check_setup_element ("audiowsincband");
- mysrcpad = gst_check_setup_src_pad (audiowsincband, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (audiowsincband, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (audiowsincband, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (audiowsincband, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gfloat);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
+ buffer_length = map.size / sizeof (gfloat);
+
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
rms = sqrt (rms / buffer_length);
fail_unless (rms <= 0.1);
+
+ gst_buffer_unmap (outbuffer, &map);
}
/* cleanup */
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 1] = 1.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gfloat);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
+ buffer_length = map.size / sizeof (gfloat);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gfloat);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
+ buffer_length = map.size / sizeof (gfloat);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gfloat);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
+ buffer_length = map.size / sizeof (gfloat);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gfloat);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
+ buffer_length = map.size / sizeof (gfloat);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gfloat);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
+ buffer_length = map.size / sizeof (gfloat);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gfloat *in;
gint i;
+ GstMapInfo map;
audiowsincband = setup_audiowsincband ();
/* Set to bandpass */
44100 / 4.0 + 44100 / 16.0, NULL);
inbuffer = gst_buffer_new_and_alloc (20 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 20; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gdouble);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
+ buffer_length = map.size / sizeof (gdouble);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 1] = 1.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gdouble);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
+ buffer_length = map.size / sizeof (gdouble);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gdouble);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
+ buffer_length = map.size / sizeof (gdouble);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gdouble);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
+ buffer_length = map.size / sizeof (gdouble);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 4) {
in[i] = 0.0;
in[i + 2] = 0.0;
in[i + 3] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gdouble);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
+ buffer_length = map.size / sizeof (gdouble);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsincband = setup_audiowsincband ();
44100 / 4.0 + 1000, NULL);
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gdouble);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
+ buffer_length = map.size / sizeof (gdouble);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
GstCaps *caps;
gdouble *in;
gint i;
+ GstMapInfo map;
audiowsincband = setup_audiowsincband ();
/* Set to bandpass */
44100 / 4.0 + 44100 / 16.0, NULL);
inbuffer = gst_buffer_new_and_alloc (20 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 20; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_BAND_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
- *
+ *
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
- *
+ *
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
*/
#include <gst/gst.h>
+#include <gst/audio/audio.h>
#include <gst/base/gstbasetransform.h>
#include <gst/check/gstcheck.h>
GstPad *mysrcpad, *mysinkpad;
#define AUDIO_WSINC_LIMIT_CAPS_STRING_32 \
- "audio/x-raw-float, " \
- "channels = (int) 1, " \
- "rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 32" \
+ "audio/x-raw, " \
+ "format = (string) " GST_AUDIO_NE (F32) ", " \
+ "layout = (string) interleaved, " \
+ "channels = (int) 1, " \
+ "rate = (int) 44100"
#define AUDIO_WSINC_LIMIT_CAPS_STRING_64 \
- "audio/x-raw-float, " \
- "channels = (int) 1, " \
- "rate = (int) 44100, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 64" \
+ "audio/x-raw, " \
+ "format = (string) " GST_AUDIO_NE (F64) ", " \
+ "layout = (string) interleaved, " \
+ "channels = (int) 1, " \
+ "rate = (int) 44100"
+
+#define FORMATS "{ "GST_AUDIO_NE (F32)","GST_AUDIO_NE (F64)" }"
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "channels = (int) 1, "
- "rate = (int) 44100, "
- "endianness = (int) BYTE_ORDER, " "width = (int) { 32, 64 } ")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMATS ", "
+ "layout = (string) interleaved, "
+ "channels = (int) 1, " "rate = (int) 44100")
);
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "channels = (int) 1, "
- "rate = (int) 44100, "
- "endianness = (int) BYTE_ORDER, " "width = (int) { 32, 64 } ")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMATS ", "
+ "layout = (string) interleaved, "
+ "channels = (int) 1, " "rate = (int) 44100")
);
static GstElement *
GST_DEBUG ("setup_audiowsinclimit");
audiowsinclimit = gst_check_setup_element ("audiowsinclimit");
- mysrcpad = gst_check_setup_src_pad (audiowsinclimit, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (audiowsinclimit, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (audiowsinclimit, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (audiowsinclimit, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsinclimit = setup_audiowsinclimit ();
g_object_set (G_OBJECT (audiowsinclimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_LIMIT_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gfloat);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
+ buffer_length = map.size / sizeof (gfloat);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
rms = sqrt (rms / buffer_length);
+ gst_buffer_unmap (outbuffer, &map);
fail_unless (rms >= 0.9);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsinclimit = setup_audiowsinclimit ();
g_object_set (G_OBJECT (audiowsinclimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_LIMIT_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gfloat);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
+ buffer_length = map.size / sizeof (gfloat);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
rms = sqrt (rms / buffer_length);
+ gst_buffer_unmap (outbuffer, &map);
fail_unless (rms <= 0.1);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsinclimit = setup_audiowsinclimit ();
g_object_set (G_OBJECT (audiowsinclimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_LIMIT_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gfloat);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
+ buffer_length = map.size / sizeof (gfloat);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
rms = sqrt (rms / buffer_length);
+ gst_buffer_unmap (outbuffer, &map);
fail_unless (rms <= 0.1);
}
GstCaps *caps;
gfloat *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsinclimit = setup_audiowsinclimit ();
g_object_set (G_OBJECT (audiowsinclimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_LIMIT_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gfloat *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gfloat);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gfloat *) map.data;
+ buffer_length = map.size / sizeof (gfloat);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
rms = sqrt (rms / buffer_length);
+ gst_buffer_unmap (outbuffer, &map);
fail_unless (rms >= 0.9);
}
GstCaps *caps;
gfloat *in;
gint i;
+ GstMapInfo map;
audiowsinclimit = setup_audiowsinclimit ();
/* Set to lowpass */
g_object_set (G_OBJECT (audiowsinclimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (20 * sizeof (gfloat));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gfloat *) map.data;
for (i = 0; i < 20; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_LIMIT_CAPS_STRING_32);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsinclimit = setup_audiowsinclimit ();
g_object_set (G_OBJECT (audiowsinclimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_LIMIT_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gdouble);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
+ buffer_length = map.size / sizeof (gdouble);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
rms = sqrt (rms / buffer_length);
+ gst_buffer_unmap (outbuffer, &map);
fail_unless (rms >= 0.9);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsinclimit = setup_audiowsinclimit ();
g_object_set (G_OBJECT (audiowsinclimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_LIMIT_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gdouble);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
+ buffer_length = map.size / sizeof (gdouble);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
rms = sqrt (rms / buffer_length);
+ gst_buffer_unmap (outbuffer, &map);
fail_unless (rms <= 0.1);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsinclimit = setup_audiowsinclimit ();
g_object_set (G_OBJECT (audiowsinclimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_LIMIT_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gdouble);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
+ buffer_length = map.size / sizeof (gdouble);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
rms = sqrt (rms / buffer_length);
+ gst_buffer_unmap (outbuffer, &map);
fail_unless (rms <= 0.1);
}
GstCaps *caps;
gdouble *in, *res, rms;
gint i;
+ GstMapInfo map;
GList *node;
audiowsinclimit = setup_audiowsinclimit ();
g_object_set (G_OBJECT (audiowsinclimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (128 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 128; i += 2) {
in[i] = 1.0;
in[i + 1] = -1.0;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_LIMIT_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_if ((outbuffer = (GstBuffer *) node->data) == NULL);
- res = (gdouble *) GST_BUFFER_DATA (outbuffer);
- buffer_length = GST_BUFFER_SIZE (outbuffer) / sizeof (gdouble);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
+ buffer_length = map.size / sizeof (gdouble);
rms = 0.0;
for (i = 0; i < buffer_length; i++)
rms += res[i] * res[i];
rms = sqrt (rms / buffer_length);
+ gst_buffer_unmap (outbuffer, &map);
fail_unless (rms >= 0.9);
}
GstCaps *caps;
gdouble *in;
gint i;
+ GstMapInfo map;
audiowsinclimit = setup_audiowsinclimit ();
/* Set to lowpass */
g_object_set (G_OBJECT (audiowsinclimit), "cutoff", 44100 / 4.0, NULL);
inbuffer = gst_buffer_new_and_alloc (20 * sizeof (gdouble));
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 20; i++)
in[i] = 1.0;
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (AUDIO_WSINC_LIMIT_CAPS_STRING_64);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
GstElement *pipeline, *src, *filter, *sink;
GstCaps *caps;
- /* check that there's a usable video sink (think of build bot case) */
- sink = gst_element_factory_make ("autovideosink", NULL);
- state_ret = gst_element_set_state (sink, GST_STATE_READY);
-
- /* need to set state back to NULL, or our test won't work since we
- * already have detected the real caps in ready and then linking fails */
- gst_element_set_state (sink, GST_STATE_NULL);
-
- if (state_ret != GST_STATE_CHANGE_SUCCESS) {
- GST_WARNING ("No usable video sink, skipping test");
- gst_object_unref (sink);
- return;
- }
-
pipeline = gst_pipeline_new ("pipeline");
src = gst_element_factory_make ("fakesrc", NULL);
filter = gst_element_factory_make ("capsfilter", NULL);
+ sink = gst_element_factory_make ("autovideosink", NULL);
- caps = gst_caps_new_simple ("video/x-raw-yuv", "format", GST_TYPE_FOURCC,
- GST_MAKE_FOURCC ('A', 'C', 'D', 'C'), NULL);
+ caps = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING,
+ "ABCD", NULL);
g_object_set (filter, "caps", caps, NULL);
gst_caps_unref (caps);
GstElement *pipeline, *src, *filter, *sink;
GstCaps *caps;
- /* check that there's a usable audio sink (think of build bot case) */
- sink = gst_element_factory_make ("autoaudiosink", NULL);
- state_ret = gst_element_set_state (sink, GST_STATE_READY);
-
- /* need to set state back to NULL, or our test won't work since we
- * already have detected the real caps in ready and then linking fails */
- gst_element_set_state (sink, GST_STATE_NULL);
-
- if (state_ret != GST_STATE_CHANGE_SUCCESS) {
- GST_WARNING ("No usable audio sink, skipping test");
- gst_object_unref (sink);
- return;
- }
-
pipeline = gst_pipeline_new ("pipeline");
src = gst_element_factory_make ("fakesrc", NULL);
filter = gst_element_factory_make ("capsfilter", NULL);
+ sink = gst_element_factory_make ("autoaudiosink", NULL);
caps = gst_caps_new_simple ("audio/x-raw-int", "width", G_TYPE_INT, 42, NULL);
GST_DEBUG ("setup_avimux");
avimux = gst_check_setup_element ("avimux");
mysrcpad = setup_src_pad (avimux, srctemplate, NULL, sinkname);
- mysinkpad = gst_check_setup_sink_pad (avimux, &sinktemplate, NULL);
+ mysinkpad = gst_check_setup_sink_pad (avimux, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
inbuffer = gst_buffer_new_and_alloc (1);
caps = gst_caps_from_string (src_caps_string);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
switch (i) {
case 0:{ /* check riff header */
/* avi header */
- guint8 *data = GST_BUFFER_DATA (outbuffer);
+ GstMapInfo map;
+ gsize size;
+ guint8 *data;
+
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
fail_unless (memcmp (data, data0, sizeof (data0)) == 0);
fail_unless (memcmp (data + 8, data1, sizeof (data1)) == 0);
fail_unless (memcmp (data + 8, data4, sizeof (data4)) == 0);
fail_unless (memcmp (data + 76, data5, sizeof (data5)) == 0);
/* avi data header */
- data = GST_BUFFER_DATA (outbuffer);
- data += GST_BUFFER_SIZE (outbuffer) - 12;
+ data = map.data;
+ data += size - 12;
fail_unless (memcmp (data, data6, sizeof (data6)) == 0);
data += 8;
fail_unless (memcmp (data, data7, sizeof (data7)) == 0);
+ gst_buffer_unmap (outbuffer, &map);
break;
}
case 1: /* chunk header */
- fail_unless (GST_BUFFER_SIZE (outbuffer) == 8);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), chunk_id, 4) == 0);
+ fail_unless (gst_buffer_get_size (outbuffer) == 8);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, chunk_id, 4) == 0);
break;
case 2:
- fail_unless (GST_BUFFER_SIZE (outbuffer) == 1);
+ fail_unless (gst_buffer_get_size (outbuffer) == 1);
break;
case 3: /* buffer we put in, must be padded to even size */
- fail_unless (GST_BUFFER_SIZE (outbuffer) == 1);
+ fail_unless (gst_buffer_get_size (outbuffer) == 1);
break;
default:
break;
GST_START_TEST (test_video_pad)
{
- check_avimux_pad (&srcvideotemplate, VIDEO_CAPS_STRING, "00db", "video_%d");
+ check_avimux_pad (&srcvideotemplate, VIDEO_CAPS_STRING, "00db", "video_%u");
}
GST_END_TEST;
GST_START_TEST (test_audio_pad)
{
- check_avimux_pad (&srcaudiotemplate, AUDIO_CAPS_STRING, "00wb", "audio_%d");
+ check_avimux_pad (&srcaudiotemplate, AUDIO_CAPS_STRING, "00wb", "audio_%u");
}
GST_END_TEST;
setup_avisubtitle (void)
{
GstElement *avisubtitle;
- GstCaps *caps;
+ GstCaps *sinkcaps, *srccaps;
GST_DEBUG ("setup_avisubtitle");
avisubtitle = gst_check_setup_element ("avisubtitle");
- caps = gst_caps_new_simple ("application/x-subtitle", NULL);
- mysinkpad = gst_check_setup_sink_pad (avisubtitle, &sink_template, caps);
- gst_caps_unref (caps);
- caps = gst_caps_new_simple ("application/x-subtitle-avi", NULL);
- mysrcpad = gst_check_setup_src_pad (avisubtitle, &src_template, caps);
- gst_caps_unref (caps);
+ sinkcaps = gst_caps_new_empty_simple ("application/x-subtitle");
+ mysinkpad = gst_check_setup_sink_pad (avisubtitle, &sink_template);
+ srccaps = gst_caps_new_empty_simple ("application/x-subtitle-avi");
+ mysrcpad = gst_check_setup_src_pad (avisubtitle, &src_template);
gst_pad_set_active (mysinkpad, TRUE);
gst_pad_set_active (mysrcpad, TRUE);
+ fail_unless (gst_pad_set_caps (mysinkpad, sinkcaps));
+ fail_unless (gst_pad_set_caps (mysrcpad, srccaps));
+ gst_caps_unref (sinkcaps);
+ gst_caps_unref (srccaps);
return avisubtitle;
}
static void
check_wrong_buffer (guint8 * data, guint length)
{
- GstBuffer *buffer = gst_buffer_new ();
+ GstBuffer *buffer = gst_buffer_new_allocate (NULL, length, 0);
GstElement *avisubtitle = setup_avisubtitle ();
- gst_buffer_set_data (buffer, data, length);
+ gst_buffer_fill (buffer, 0, data, length);
fail_unless (gst_element_set_state (avisubtitle,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
check_correct_buffer (guint8 * src_data, guint src_size, guint8 * dst_data,
guint dst_size)
{
- GstBuffer *buffer = gst_buffer_new ();
+ GstBuffer *buffer = gst_buffer_new_allocate (NULL, src_size, 0);
GstBuffer *newBuffer;
GstElement *avisubtitle = setup_avisubtitle ();
GstEvent *event;
fail_unless (g_list_length (buffers) == 0, "Buffers list needs to be empty");
- gst_buffer_set_data (buffer, src_data, src_size);
+ gst_buffer_fill (buffer, 0, src_data, src_size);
fail_unless (gst_element_set_state (avisubtitle,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
newBuffer = GST_BUFFER (buffers->data);
buffers = g_list_remove (buffers, newBuffer);
fail_unless (g_list_length (buffers) == 1, "Buffers list needs to be empty");
- fail_unless (GST_BUFFER_SIZE (newBuffer) == dst_size,
+ fail_unless (gst_buffer_get_size (newBuffer) == dst_size,
"size of the new buffer is wrong ( %d != %d)",
- GST_BUFFER_SIZE (newBuffer), dst_size);
- fail_unless (memcmp (GST_BUFFER_DATA (newBuffer), dst_data, dst_size) == 0,
+ gst_buffer_get_size (newBuffer), dst_size);
+ fail_unless (gst_buffer_memcmp (newBuffer, 0, dst_data, dst_size) == 0,
"data of the buffer is not correct");
gst_buffer_unref (newBuffer);
/* free the buffer from seeking */
GST_DEBUG ("setup_capssetter");
capssetter = gst_check_setup_element ("capssetter");
- mysrcpad = gst_check_setup_src_pad (capssetter, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (capssetter, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (capssetter, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (capssetter, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
{
GstElement *capssetter;
GstBuffer *buffer;
+ GstCaps *current_out;
capssetter = setup_capssetter ();
fail_unless (gst_element_set_state (capssetter,
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- buffer = gst_buffer_new_and_alloc (4);
- ASSERT_BUFFER_REFCOUNT (buffer, "buffer", 1);
- memcpy (GST_BUFFER_DATA (buffer), "data", 4);
-
- gst_buffer_set_caps (buffer, in_caps);
- gst_caps_unref (in_caps);
-
g_object_set (capssetter, "join", join, NULL);
g_object_set (capssetter, "replace", replace, NULL);
g_object_set (capssetter, "caps", prop_caps, NULL);
gst_caps_unref (prop_caps);
+ buffer = gst_buffer_new_and_alloc (4);
+ ASSERT_BUFFER_REFCOUNT (buffer, "buffer", 1);
+ gst_buffer_fill (buffer, 0, "data", 4);
+
+ gst_pad_set_caps (mysrcpad, in_caps);
+ gst_caps_unref (in_caps);
+
/* pushing gives away my reference ... */
fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK,
"Failed pushing buffer to capssetter");
buffer = g_list_first (buffers)->data;
ASSERT_BUFFER_REFCOUNT (buffer, "buffer", 1);
- fail_unless (gst_caps_is_equal (out_caps, GST_BUFFER_CAPS (buffer)));
+ current_out = gst_pad_get_current_caps (mysinkpad);
+ fail_unless (gst_caps_is_equal (out_caps, current_out));
+ gst_caps_unref (current_out);
gst_caps_unref (out_caps);
/* cleanup */
* Boston, MA 02111-1307, USA.
*/
+/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
#include <gst/check/gstcheck.h>
#include <gst/tag/tag.h>
buffer_new (const gchar * buffer_data, guint size)
{
GstBuffer *buffer;
+ guint8 *data;
- GstCaps *caps;
+ data = g_malloc (size);
+ memcpy (data, buffer_data, size);
- buffer = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buffer), buffer_data, size);
- caps = gst_caps_from_string (SRC_CAPS);
- gst_buffer_set_caps (buffer, caps);
- gst_caps_unref (caps);
+ if (data)
+ buffer = gst_buffer_new_wrapped (data, size);
+ else
+ buffer = gst_buffer_new ();
return buffer;
}
{
GST_DEBUG ("setup_cmmldec");
cmmldec = gst_check_setup_element ("cmmldec");
- srcpad = gst_check_setup_src_pad (cmmldec, &srctemplate, NULL);
- sinkpad = gst_check_setup_sink_pad (cmmldec, &sinktemplate, NULL);
+ srcpad = gst_check_setup_src_pad (cmmldec, &srctemplate);
+ sinkpad = gst_check_setup_sink_pad (cmmldec, &sinktemplate);
gst_pad_set_active (srcpad, TRUE);
gst_pad_set_active (sinkpad, TRUE);
const gchar * data, gint refcount)
{
GstBuffer *buffer;
+ GstMapInfo map;
if (current_buf == NULL)
current_buf = buffers;
fail_unless (current_buf != NULL);
buffer = GST_BUFFER (current_buf->data);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
ASSERT_OBJECT_REFCOUNT (buffer, name, refcount);
- fail_unless (memcmp (GST_BUFFER_DATA (buffer), data,
- GST_BUFFER_SIZE (buffer)) == 0,
- "'%s' (%s) is not equal to (%s)", name, GST_BUFFER_DATA (buffer), data);
+ fail_unless (memcmp (map.data, data, map.size) == 0,
+ "'%s' (%s) is not equal to (%s)", name, map.data, data);
+
+ gst_buffer_unmap (buffer, &map);
}
static GstFlowReturn
return gst_pad_push (srcpad, buffer);
}
+static void
+push_caps (void)
+{
+ GstCaps *caps;
+
+ caps = gst_caps_from_string (SRC_CAPS);
+ fail_unless (gst_pad_set_caps (srcpad, caps));
+ gst_caps_unref (caps);
+}
+
static GObject *
cmml_tag_message_pop (GstBus * bus, const gchar * tag)
{
GValueArray *meta;
+ push_caps ();
+
/* push the ident header */
flow = push_data ("ident-header", IDENT_HEADER, IDENT_HEADER_SIZE, 0);
fail_unless_equals_flow_return (flow, GST_FLOW_OK);
GST_START_TEST (test_preamble_no_pi)
{
+ push_caps ();
+
flow = push_data ("ident-header", IDENT_HEADER, IDENT_HEADER_SIZE, 0);
fail_unless_equals_flow_return (flow, GST_FLOW_OK);
fail_unless_equals_int (g_list_length (buffers), 0);
{
const gchar *bad_xml = "<?xml version=\"1.0\"?><a><b></a>";
+ push_caps ();
+
/* malformed ident header */
flow = push_data ("bad-ident-header", "CMML\0\0\0\0garbage", 15, 0);
fail_unless_equals_flow_return (flow, GST_FLOW_ERROR);
granulepos = keyindex + keyoffset;
fail_unless (gst_pad_query_convert (GST_PAD_PEER (srcpad),
- GST_FORMAT_DEFAULT, granulepos, &dstfmt, &dstval));
+ GST_FORMAT_DEFAULT, granulepos, dstfmt, &dstval));
fail_unless (dstfmt == GST_FORMAT_TIME);
/* fail unless dstval == index + offset */
buffer_new (const gchar * buffer_data, guint size)
{
GstBuffer *buffer;
- GstCaps *caps;
+ guint8 *data;
- buffer = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buffer), buffer_data, size);
- caps = gst_caps_from_string (SRC_CAPS);
- gst_buffer_set_caps (buffer, caps);
- gst_caps_unref (caps);
+ data = g_malloc (size);
+ memcpy (data, buffer_data, size);
+
+ buffer = gst_buffer_new_wrapped (data, size);
return buffer;
}
GST_DEBUG ("setup_cmmlenc");
cmmlenc = gst_check_setup_element ("cmmlenc");
- srcpad = gst_check_setup_src_pad (cmmlenc, &srctemplate, NULL);
- sinkpad = gst_check_setup_sink_pad (cmmlenc, &sinktemplate, NULL);
+ srcpad = gst_check_setup_src_pad (cmmlenc, &srctemplate);
+ sinkpad = gst_check_setup_sink_pad (cmmlenc, &sinktemplate);
gst_pad_set_active (srcpad, TRUE);
gst_pad_set_active (sinkpad, TRUE);
const gchar * data, gint refcount)
{
GstBuffer *buffer;
+ GstMapInfo map;
if (current_buf == NULL)
current_buf = buffers;
fail_unless (current_buf != NULL);
buffer = GST_BUFFER (current_buf->data);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
ASSERT_OBJECT_REFCOUNT (buffer, name, refcount);
- fail_unless (memcmp (GST_BUFFER_DATA (buffer), data,
- GST_BUFFER_SIZE (buffer)) == 0,
- "'%s' (%s) is not equal to (%s)", name, GST_BUFFER_DATA (buffer), data);
+ fail_unless (memcmp (map.data, data, map.size) == 0,
+ "'%s' (%s) is not equal to (%s)", name, map.data, data);
+
+ gst_buffer_unmap (buffer, &map);
}
static GstFlowReturn
}
static void
+push_caps (void)
+{
+ GstCaps *caps;
+
+ caps = gst_caps_from_string (SRC_CAPS);
+ fail_unless (gst_pad_set_caps (srcpad, caps));
+ gst_caps_unref (caps);
+}
+
+static void
check_headers (void)
{
+ push_caps ();
+
/* push the cmml start tag */
flow = push_data ("preamble", PREAMBLE, strlen (PREAMBLE));
fail_unless_equals_flow_return (flow, GST_FLOW_OK);
static gboolean
gst_caps_is_interlaced (GstCaps * caps)
{
- GstStructure G_GNUC_UNUSED *structure;
- gboolean interlaced = FALSE;
+ GstVideoInfo info;
fail_unless (gst_caps_is_fixed (caps));
- structure = gst_caps_get_structure (caps, 0);
- fail_unless (gst_video_format_parse_caps_interlaced (caps, &interlaced));
- return interlaced;
+ fail_unless (gst_video_info_from_caps (&info, caps));
+
+ return GST_VIDEO_INFO_IS_INTERLACED (&info);
}
GST_START_TEST (test_create_and_unref)
"width=(int)3200, height=(int)3400, framerate=(fraction)0/1"
#define CAPS_YUY2 \
- "video/x-raw-yuv, " \
+ "video/x-raw, " \
CAPS_VIDEO_COMMON ", " \
- "format=(fourcc)YUY2"
+ "format=(string)YUY2"
#define CAPS_YUY2_INTERLACED \
CAPS_YUY2 ", " \
- "interlaced=(boolean)true"
+ "interlace-mode=interleaved"
#define CAPS_YVYU \
- "video/x-raw-yuv, " \
+ "video/x-raw, " \
CAPS_VIDEO_COMMON ", " \
- "format=(fourcc)YVYU"
+ "format=(string)YVYU"
#define CAPS_YVYU_INTERLACED \
CAPS_YVYU ", " \
- "interlaced=(boolean)true"
+ "interlace-mode=interleaved"
#define CAPS_YUY2_IMAGE \
- "video/x-raw-yuv, " \
+ "video/x-raw, " \
CAPS_IMAGE_COMMON ", " \
- "format=(fourcc)YUY2"
+ "format=(string)YUY2"
#define CAPS_YUY2_INTERLACED_IMAGE \
CAPS_YUY2_IMAGE ", " \
- "interlaced=(boolean)true"
+ "interlace-mode=interleaved"
#define CAPS_YVYU_IMAGE \
- "video/x-raw-yuv, " \
+ "video/x-raw, " \
CAPS_IMAGE_COMMON ", " \
- "format=(fourcc)YVYU"
+ "format=(string)YVYU"
#define CAPS_YVYU_INTERLACED_IMAGE \
CAPS_YVYU_IMAGE ", " \
- "interlaced=(boolean)true"
+ "interlace-mode=interleaved"
static GstElement *deinterlace;
static GstPad *srcpad;
/*
* Checks if 2 buffers are equal
*
- * Equals means same caps and same data
+ * Equals means same data
*/
static gboolean
test_buffer_equals (GstBuffer * buf_a, GstBuffer * buf_b)
{
- GstCaps *caps_a;
- GstCaps *caps_b;
-
- if (GST_BUFFER_SIZE (buf_a) != GST_BUFFER_SIZE (buf_b))
- return FALSE;
-
- caps_a = gst_buffer_get_caps (buf_a);
- caps_b = gst_buffer_get_caps (buf_b);
+ GstMapInfo m1, m2;
+ gboolean res = FALSE;
- if (!gst_caps_is_equal (caps_a, caps_b))
- return FALSE;
+ gst_buffer_map (buf_a, &m1, GST_MAP_READ);
+ gst_buffer_map (buf_b, &m2, GST_MAP_READ);
- gst_caps_unref (caps_a);
- gst_caps_unref (caps_b);
+ if (m1.size == m2.size) {
+ res = memcmp (m1.data, m2.data, m1.size) == 0;
+ }
+ gst_buffer_unmap (buf_a, &m1);
+ gst_buffer_unmap (buf_b, &m2);
- return memcmp (GST_BUFFER_DATA (buf_a), GST_BUFFER_DATA (buf_b),
- GST_BUFFER_SIZE (buf_a)) == 0;
+ return res;
}
-static gboolean
-sinkpad_enqueue_buffer (GstPad * pad, GstBuffer * buf, gpointer data)
+static GstPadProbeReturn
+sinkpad_enqueue_buffer (GstPad * pad, GstPadProbeInfo * info, gpointer data)
{
GQueue *queue = (GQueue *) data;
+ GstBuffer *buf = GST_PAD_PROBE_INFO_BUFFER (info);
/* enqueue a copy for being compared later */
g_queue_push_tail (queue, gst_buffer_copy (buf));
- return TRUE;
+ return GST_PAD_PROBE_OK;
}
/*
* pad buffer probe that compares the buffer with the top one
* in the GQueue passed as the user data
*/
-static gboolean
-srcpad_dequeue_and_compare_buffer (GstPad * pad, GstBuffer * buf, gpointer data)
+static GstPadProbeReturn
+srcpad_dequeue_and_compare_buffer (GstPad * pad, GstPadProbeInfo * info,
+ gpointer data)
{
GQueue *queue = (GQueue *) data;
+ GstBuffer *buf = GST_PAD_PROBE_INFO_BUFFER (info);
GstBuffer *queue_buf;
queue_buf = (GstBuffer *) g_queue_pop_head (queue);
gst_buffer_unref (queue_buf);
- return TRUE;
+ return GST_PAD_PROBE_OK;
}
/*
queue = g_queue_new ();
/* set up probes for testing */
- gst_pad_add_buffer_probe (sinkpad, (GCallback) sinkpad_enqueue_buffer, queue);
- gst_pad_add_buffer_probe (srcpad,
- (GCallback) srcpad_dequeue_and_compare_buffer, queue);
+ gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER, sinkpad_enqueue_buffer,
+ queue, NULL);
+ gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BUFFER,
+ srcpad_dequeue_and_compare_buffer, queue, NULL);
fail_unless (gst_element_set_state (pipeline, GST_STATE_PLAYING) !=
GST_STATE_CHANGE_FAILURE);
#endif
#include <stdio.h>
+#include <gst/audio/audio.h>
#include <gst/check/gstcheck.h>
-#include <gst/audio/multichannel.h>
+#include <gst/audio/audio.h>
GST_START_TEST (test_create_and_unref)
{
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "width = (int) 32, "
- "channels = (int) 1, "
- "rate = (int) {32000, 48000}, " "endianness = (int) BYTE_ORDER"));
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (F32) ", "
+ "channels = (int) 1, layout = (string) {interleaved, non-interleaved}, rate = (int) {32000, 48000}"));
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "width = (int) 32, "
- "channels = (int) { 2, 3 }, "
- "rate = (int) {32000, 48000}, " "endianness = (int) BYTE_ORDER"));
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (F32) ", "
+ "channels = (int) { 2, 3 }, layout = (string) interleaved, rate = (int) {32000, 48000}"));
#define CAPS_32khz \
- "audio/x-raw-float, " \
- "width = (int) 32, " \
- "channels = (int) 2, " \
- "rate = (int) 32000, " \
- "endianness = (int) BYTE_ORDER"
+ "audio/x-raw, " \
+ "format = (string) "GST_AUDIO_NE (F32) ", " \
+ "channels = (int) 2, layout = (string) interleaved, " \
+ "rate = (int) 32000"
#define CAPS_48khz \
- "audio/x-raw-float, " \
- "width = (int) 32, " \
- "channels = (int) 2, " \
- "rate = (int) 48000, " \
- "endianness = (int) BYTE_ORDER"
+ "audio/x-raw, " \
+ "format = (string) "GST_AUDIO_NE (F32) ", " \
+ "channels = (int) 2, layout = (string) interleaved, " \
+ "rate = (int) 48000"
#define CAPS_48khz_3CH \
- "audio/x-raw-float, " \
- "width = (int) 32, " \
- "channels = (int) 3, " \
- "rate = (int) 48000, " \
- "endianness = (int) BYTE_ORDER"
+ "audio/x-raw, " \
+ "format = (string) "GST_AUDIO_NE (F32) ", " \
+ "channels = (int) 3, layout = (string) interleaved, " \
+ "rate = (int) 48000"
static GstFlowReturn
-deinterleave_chain_func (GstPad * pad, GstBuffer * buffer)
+deinterleave_chain_func (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
gint i;
+ GstMapInfo map;
gfloat *indata;
fail_unless (GST_IS_BUFFER (buffer));
- fail_unless_equals_int (GST_BUFFER_SIZE (buffer), 48000 * sizeof (gfloat));
- fail_unless (GST_BUFFER_DATA (buffer) != NULL);
-
- indata = (gfloat *) GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ indata = (gfloat *) map.data;
+ fail_unless_equals_int (map.size, 48000 * sizeof (gfloat));
+ fail_unless (indata != NULL);
if (strcmp (GST_PAD_NAME (pad), "sink0") == 0) {
for (i = 0; i < 48000; i++)
} else {
g_assert_not_reached ();
}
-
+ gst_buffer_unmap (buffer, &map);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
GstBuffer *inbuf;
GstCaps *caps;
gfloat *indata;
+ GstMapInfo map;
+ guint64 channel_mask = 0;
mysinkpads = g_new0 (GstPad *, 2);
nsinkpads = 0;
mysrcpad = gst_pad_new_from_static_template (&srctemplate, "src");
fail_unless (mysrcpad != NULL);
+ gst_pad_set_active (mysrcpad, TRUE);
caps = gst_caps_from_string (CAPS_48khz);
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
+ gst_caps_set_simple (caps, "channel-mask", GST_TYPE_BITMASK, channel_mask,
+ NULL);
+
fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_pad_use_fixed_caps (mysrcpad);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS);
inbuf = gst_buffer_new_and_alloc (2 * 48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ inbuf = gst_buffer_make_writable (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 2 * 48000; i += 2) {
indata[i] = -1.0;
indata[i + 1] = 1.0;
}
- gst_buffer_set_caps (inbuf, caps);
+ gst_buffer_unmap (inbuf, &map);
+ gst_pad_set_caps (mysrcpad, caps);
fail_unless (gst_pad_push (mysrcpad, inbuf) == GST_FLOW_OK);
GstBuffer *inbuf;
GstCaps *caps;
gfloat *indata;
+ GstMapInfo map;
+ guint64 channel_mask = 0;
nsinkpads = 0;
mysinkpads = g_new0 (GstPad *, 2);
mysrcpad = gst_pad_new_from_static_template (&srctemplate, "src");
fail_unless (mysrcpad != NULL);
+ gst_pad_set_active (mysrcpad, TRUE);
caps = gst_caps_from_string (CAPS_48khz);
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
+ gst_caps_set_simple (caps, "channel-mask", GST_TYPE_BITMASK, channel_mask,
+ NULL);
+
fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_pad_use_fixed_caps (mysrcpad);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS);
inbuf = gst_buffer_new_and_alloc (2 * 48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ inbuf = gst_buffer_make_writable (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 2 * 48000; i += 2) {
indata[i] = -1.0;
indata[i + 1] = 1.0;
}
- gst_buffer_set_caps (inbuf, caps);
+ gst_buffer_unmap (inbuf, &map);
+ gst_pad_set_caps (mysrcpad, caps);
fail_unless (gst_pad_push (mysrcpad, inbuf) == GST_FLOW_OK);
gint i;
GstBuffer *inbuf;
gfloat *indata;
+ GstMapInfo map;
+ guint64 channel_mask;
nsinkpads = 0;
mysinkpads = g_new0 (GstPad *, 2);
mysrcpad = gst_pad_new_from_static_template (&srctemplate, "src");
fail_unless (mysrcpad != NULL);
+ gst_pad_set_active (mysrcpad, TRUE);
+
caps = gst_caps_from_string (CAPS_48khz);
+ channel_mask = 0;
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
+ gst_caps_set_simple (caps, "channel-mask", GST_TYPE_BITMASK, channel_mask,
+ NULL);
fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_pad_use_fixed_caps (mysrcpad);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS);
inbuf = gst_buffer_new_and_alloc (2 * 48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ inbuf = gst_buffer_make_writable (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 2 * 48000; i += 2) {
indata[i] = -1.0;
indata[i + 1] = 1.0;
}
- gst_buffer_set_caps (inbuf, caps);
+ gst_buffer_unmap (inbuf, &map);
+ gst_pad_set_caps (mysrcpad, caps);
fail_unless (gst_pad_push (mysrcpad, inbuf) == GST_FLOW_OK);
caps2 = gst_caps_from_string (CAPS_32khz);
+ channel_mask = 0;
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
+ gst_caps_set_simple (caps2, "channel-mask", GST_TYPE_BITMASK, channel_mask,
+ NULL);
gst_pad_set_caps (mysrcpad, caps2);
inbuf = gst_buffer_new_and_alloc (2 * 48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ inbuf = gst_buffer_make_writable (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 2 * 48000; i += 2) {
indata[i] = -1.0;
indata[i + 1] = 1.0;
}
- gst_buffer_set_caps (inbuf, caps2);
+ gst_buffer_unmap (inbuf, &map);
+ gst_pad_set_caps (mysrcpad, caps2);
/* Should work fine because the caps changed in a compatible way */
fail_unless (gst_pad_push (mysrcpad, inbuf) == GST_FLOW_OK);
gst_caps_unref (caps2);
caps2 = gst_caps_from_string (CAPS_48khz_3CH);
+ channel_mask = 0;
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
+ channel_mask |=
+ G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;
+ gst_caps_set_simple (caps2, "channel-mask", GST_TYPE_BITMASK, channel_mask,
+ NULL);
gst_pad_set_caps (mysrcpad, caps2);
inbuf = gst_buffer_new_and_alloc (3 * 48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ inbuf = gst_buffer_make_writable (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 3 * 48000; i += 3) {
indata[i] = -1.0;
indata[i + 1] = 1.0;
indata[i + 2] = 0.0;
}
- gst_buffer_set_caps (inbuf, caps2);
+ gst_buffer_unmap (inbuf, &map);
+ gst_pad_set_caps (mysrcpad, caps2);
/* Should break because the caps changed in an incompatible way */
fail_if (gst_pad_push (mysrcpad, inbuf) == GST_FLOW_OK);
set_channel_positions (GstCaps * caps, int channels,
GstAudioChannelPosition * channelpositions)
{
- GValue chanpos = { 0 };
- GValue pos = { 0 };
- GstStructure *structure = gst_caps_get_structure (caps, 0);
int c;
+ guint64 channel_mask = 0;
- g_value_init (&chanpos, GST_TYPE_ARRAY);
- g_value_init (&pos, GST_TYPE_AUDIO_CHANNEL_POSITION);
+ for (c = 0; c < channels; c++)
+ channel_mask |= G_GUINT64_CONSTANT (1) << channelpositions[c];
- for (c = 0; c < channels; c++) {
- g_value_set_enum (&pos, channelpositions[c]);
- gst_value_array_append_value (&chanpos, &pos);
- }
- g_value_unset (&pos);
-
- gst_structure_set_value (structure, "channel-positions", &chanpos);
- g_value_unset (&chanpos);
+ gst_caps_set_simple (caps, "channel-mask", GST_TYPE_BITMASK, channel_mask,
+ NULL);
}
static void
src_handoff_float32_8ch (GstElement * src, GstBuffer * buf, GstPad * pad,
gpointer user_data)
{
+ gfloat *data, *p;
+ guint size, i, c;
GstAudioChannelPosition layout[NUM_CHANNELS];
GstCaps *caps;
- gfloat *data;
- guint size, i, c;
- caps = gst_caps_new_simple ("audio/x-raw-float",
- "width", G_TYPE_INT, 32,
- "depth", G_TYPE_INT, 32,
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (F32),
"channels", G_TYPE_INT, NUM_CHANNELS,
- "rate", G_TYPE_INT, SAMPLE_RATE,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
+ "layout", G_TYPE_STRING, "interleaved",
+ "rate", G_TYPE_INT, SAMPLE_RATE, NULL);
for (i = 0; i < NUM_CHANNELS; ++i)
- layout[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
+ layout[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT + i;
set_channel_positions (caps, NUM_CHANNELS, layout);
+ gst_pad_set_caps (pad, caps);
+ gst_caps_unref (caps);
size = sizeof (gfloat) * SAMPLES_PER_BUFFER * NUM_CHANNELS;
- data = (gfloat *) g_malloc (size);
-
- GST_BUFFER_MALLOCDATA (buf) = (guint8 *) data;
- GST_BUFFER_DATA (buf) = (guint8 *) data;
- GST_BUFFER_SIZE (buf) = size;
-
- GST_BUFFER_OFFSET (buf) = 0;
- GST_BUFFER_TIMESTAMP (buf) = 0;
-
- GST_BUFFER_CAPS (buf) = caps;
+ data = p = (gfloat *) g_malloc (size);
for (i = 0; i < SAMPLES_PER_BUFFER; ++i) {
for (c = 0; c < NUM_CHANNELS; ++c) {
- *data = (gfloat) ((i * NUM_CHANNELS) + c);
- ++data;
+ *p = (gfloat) ((i * NUM_CHANNELS) + c);
+ ++p;
}
}
+
+ buf = gst_buffer_new ();
+ gst_buffer_take_memory (buf, -1, gst_memory_new_wrapped (0, data, g_free,
+ size, 0, size));
+ GST_BUFFER_OFFSET (buf) = 0;
+ GST_BUFFER_TIMESTAMP (buf) = 0;
}
-static gboolean
-float_buffer_check_probe (GstPad * pad, GstBuffer * buf, gpointer userdata)
+static GstPadProbeReturn
+float_buffer_check_probe (GstPad * pad, GstPadProbeInfo * info,
+ gpointer userdata)
{
+ GstMapInfo map;
gfloat *data;
guint padnum, numpads;
guint num, i;
GstStructure *s;
GstAudioChannelPosition *pos;
gint channels;
+ GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
+ GstAudioInfo audio_info;
+ guint pad_id = GPOINTER_TO_UINT (userdata);
- fail_unless_equals_int (sscanf (GST_PAD_NAME (pad), "src%u", &padnum), 1);
+ fail_unless_equals_int (sscanf (GST_PAD_NAME (pad), "src_%u", &padnum), 1);
numpads = pads_created;
/* Check caps */
- caps = GST_BUFFER_CAPS (buf);
+ caps = gst_pad_get_current_caps (pad);
fail_unless (caps != NULL);
s = gst_caps_get_structure (caps, 0);
fail_unless (gst_structure_get_int (s, "channels", &channels));
fail_unless_equals_int (channels, 1);
- fail_unless (gst_structure_has_field (s, "channel-positions"));
- pos = gst_audio_get_channel_positions (s);
- fail_unless (pos != NULL && pos[0] == GST_AUDIO_CHANNEL_POSITION_NONE);
- g_free (pos);
- data = (gfloat *) GST_BUFFER_DATA (buf);
- num = GST_BUFFER_SIZE (buf) / sizeof (gfloat);
+ gst_audio_info_init (&audio_info);
+ fail_unless (gst_audio_info_from_caps (&audio_info, caps));
+
+ pos = audio_info.position;
+ fail_unless (pos != NULL
+ && pos[0] == GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT + pad_id);
+ gst_caps_unref (caps);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = (gfloat *) map.data;
+ num = map.size / sizeof (gfloat);
/* Check buffer content */
for (i = 0; i < num; ++i) {
/* check that the first channel is on pad src0, the second on src1 etc. */
fail_unless_equals_int (rest, padnum);
}
+ gst_buffer_unmap (buffer, &map);
- return TRUE; /* don't drop data */
+ return GST_PAD_PROBE_OK; /* don't drop data */
}
static void
fail_unless (sink != NULL);
gst_bin_add_many (GST_BIN (pipeline), queue, sink, NULL);
- fail_unless (gst_element_link_many (queue, sink, NULL));
+ gst_element_link_pads_full (queue, "src", sink, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
sinkpad = gst_element_get_static_pad (queue, "sink");
+
fail_unless_equals_int (gst_pad_link (pad, sinkpad), GST_PAD_LINK_OK);
gst_object_unref (sinkpad);
- gst_pad_add_buffer_probe (pad, G_CALLBACK (float_buffer_check_probe), NULL);
+
+ gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER, float_buffer_check_probe,
+ GUINT_TO_POINTER (pads_created), NULL);
gst_element_set_state (sink, GST_STATE_PLAYING);
gst_element_set_state (queue, GST_STATE_PLAYING);
*/
#include <gst/gst.h>
+#include <gst/audio/audio.h>
#include <gst/base/gstbasetransform.h>
#include <gst/check/gstcheck.h>
* get_peer, and then remove references in every test function */
GstPad *mysrcpad, *mysinkpad;
-#define EQUALIZER_CAPS_STRING \
- "audio/x-raw-float, " \
- "channels = (int) 1, " \
- "rate = (int) 48000, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 64" \
+#define EQUALIZER_CAPS_STRING \
+ "audio/x-raw, " \
+ "format = (string) "GST_AUDIO_NE (F64) ", " \
+ "layout = (string) interleaved, " \
+ "channels = (int) 1, " \
+ "rate = (int) 48000"
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "channels = (int) 1, "
- "rate = (int) 48000, "
- "endianness = (int) BYTE_ORDER, " "width = (int) 64 ")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (F64) ", "
+ "layout = (string) interleaved, "
+ "channels = (int) 1, " "rate = (int) 48000")
);
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "channels = (int) 1, "
- "rate = (int) 48000, "
- "endianness = (int) BYTE_ORDER, " "width = (int) 64 ")
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (F64) ", "
+ "layout = (string) interleaved, "
+ "channels = (int) 1, " "rate = (int) 48000")
);
static GstElement *
GST_DEBUG ("setup_equalizer");
equalizer = gst_check_setup_element ("equalizer-nbands");
- mysrcpad = gst_check_setup_src_pad (equalizer, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (equalizer, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (equalizer, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (equalizer, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstCaps *caps;
gdouble *in, *res;
gint i;
+ GstMapInfo map;
equalizer = setup_equalizer ();
g_object_set (G_OBJECT (equalizer), "num-bands", 5, NULL);
"could not set to playing");
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i++)
in[i] = g_random_double_range (-1.0, 1.0);
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (EQUALIZER_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* ... and puts a new buffer on the global list */
fail_unless (g_list_length (buffers) == 1);
- res = (gdouble *) GST_BUFFER_DATA (GST_BUFFER (buffers->data));
+ gst_buffer_map (GST_BUFFER (buffers->data), &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
for (i = 0; i < 1024; i++)
fail_unless_equals_float (in[i], res[i]);
+ gst_buffer_unmap (GST_BUFFER (buffers->data), &map);
/* cleanup */
cleanup_equalizer (equalizer);
GstCaps *caps;
gdouble *in, *res, rms_in, rms_out;
gint i;
+ GstMapInfo map;
equalizer = setup_equalizer ();
g_object_set (G_OBJECT (equalizer), "num-bands", 5, NULL);
"could not set to playing");
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i++)
in[i] = g_random_double_range (-1.0, 1.0);
+ gst_buffer_unmap (inbuffer, &map);
rms_in = 0.0;
for (i = 0; i < 1024; i++)
rms_in = sqrt (rms_in / 1024);
caps = gst_caps_from_string (EQUALIZER_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* ... and puts a new buffer on the global list */
fail_unless (g_list_length (buffers) == 1);
- res = (gdouble *) GST_BUFFER_DATA (GST_BUFFER (buffers->data));
+ gst_buffer_map (GST_BUFFER (buffers->data), &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms_out = 0.0;
for (i = 0; i < 1024; i++)
rms_out += res[i] * res[i];
rms_out = sqrt (rms_out / 1024);
+ gst_buffer_unmap (GST_BUFFER (buffers->data), &map);
fail_unless (rms_in > rms_out);
GstCaps *caps;
gdouble *in, *res, rms_in, rms_out;
gint i;
+ GstMapInfo map;
equalizer = setup_equalizer ();
g_object_set (G_OBJECT (equalizer), "num-bands", 5, NULL);
"could not set to playing");
inbuffer = gst_buffer_new_and_alloc (1024 * sizeof (gdouble));
- in = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ in = (gdouble *) map.data;
for (i = 0; i < 1024; i++)
in[i] = g_random_double_range (-1.0, 1.0);
+ gst_buffer_unmap (inbuffer, &map);
rms_in = 0.0;
for (i = 0; i < 1024; i++)
rms_in = sqrt (rms_in / 1024);
caps = gst_caps_from_string (EQUALIZER_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* ... and puts a new buffer on the global list */
fail_unless (g_list_length (buffers) == 1);
- res = (gdouble *) GST_BUFFER_DATA (GST_BUFFER (buffers->data));
+ gst_buffer_map (GST_BUFFER (buffers->data), &map, GST_MAP_READ);
+ res = (gdouble *) map.data;
rms_out = 0.0;
for (i = 0; i < 1024; i++)
rms_out += res[i] * res[i];
rms_out = sqrt (rms_out / 1024);
+ gst_buffer_unmap (GST_BUFFER (buffers->data), &map);
fail_unless (rms_in < rms_out);
fail_unless (G_VALUE_TYPE (bufval) == GST_TYPE_BUFFER);
buf = g_value_peek_pointer (bufval);
if (i == 0) {
- fail_unless (GST_BUFFER_SIZE (buf) == sizeof (streaminfo_header));
- fail_unless (memcmp (GST_BUFFER_DATA (buf), streaminfo_header,
+ fail_unless (gst_buffer_get_size (buf) == sizeof (streaminfo_header));
+ fail_unless (gst_buffer_memcmp (buf, 0, streaminfo_header,
sizeof (streaminfo_header)) == 0);
} else if (i == 1) {
- fail_unless (GST_BUFFER_SIZE (buf) == sizeof (comment_header));
- fail_unless (memcmp (GST_BUFFER_DATA (buf), comment_header,
+ fail_unless (gst_buffer_get_size (buf) == sizeof (comment_header));
+ fail_unless (gst_buffer_memcmp (buf, 0, comment_header,
sizeof (comment_header)) == 0);
}
}
GST_END_TEST;
-GST_START_TEST (test_parse_flac_set_index)
-{
- GstElement *parse;
- GstIndex *idx;
-
- idx = gst_index_factory_make ("memindex");
- if (idx == NULL)
- return;
- parse = gst_element_factory_make ("flacparse", NULL);
- fail_unless (parse != NULL);
- gst_object_ref_sink (idx);
- gst_element_set_index (parse, GST_INDEX (idx));
- gst_object_unref (idx);
- gst_object_unref (parse);
-}
-
-GST_END_TEST;
-
static Suite *
flacparse_suite (void)
{
/* Other tests */
tcase_add_test (tc_chain, test_parse_flac_detect_stream);
- tcase_add_test (tc_chain, test_parse_flac_set_index);
return s;
}
{
*p_counter += 1;
GST_LOG ("counter = %d", *p_counter);
-
- fail_unless (GST_BUFFER_CAPS (buf) != NULL);
}
static void
{
*p_counter += 1;
GST_LOG ("counter = %d", *p_counter);
-
- fail_unless (GST_BUFFER_CAPS (buf) != NULL);
}
static void
create_icydemux (void)
{
icydemux = gst_check_setup_element ("icydemux");
- srcpad = gst_check_setup_src_pad (icydemux, &srctemplate, NULL);
+ srcpad = gst_check_setup_src_pad (icydemux, &srctemplate);
g_signal_connect (icydemux, "pad-added", G_CALLBACK (icydemux_found_pad),
NULL);
GstFlowReturn res;
GstBuffer *buffer = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buffer), data, len);
- gst_buffer_set_caps (buffer, caps);
+ gst_buffer_fill (buffer, 0, data, len);
GST_BUFFER_OFFSET (buffer) = offset;
+ gst_pad_set_caps (srcpad, caps);
res = gst_pad_push (srcpad, buffer);
fail_unless (res == GST_FLOW_OK, "Failed pushing buffer: %d", res);
/* first buffer should have offset 0 even after it was merged with 2nd buf */
fail_unless (GST_BUFFER_OFFSET (GST_BUFFER_CAST (buffers->data)) == 0);
- /* first buffer should have caps set */
- fail_unless (GST_BUFFER_CAPS (GST_BUFFER_CAST (buffers->data)) != NULL);
-
gst_caps_unref (icy_caps);
cleanup_icydemux ();
typedef void (CheckTagsFunc) (const GstTagList * tags, const gchar * file);
-static void
-pad_added_cb (GstElement * id3demux, GstPad * pad, GstBin * pipeline)
-{
- GstElement *sink;
-
- sink = gst_bin_get_by_name (pipeline, "fakesink");
- fail_unless (gst_element_link (id3demux, sink));
- gst_object_unref (sink);
-
- gst_element_set_state (sink, GST_STATE_PAUSED);
-}
-
static GstBusSyncReply
error_cb (GstBus * bus, GstMessage * msg, gpointer user_data)
{
fail_unless (gst_element_link (src, sep));
fail_unless (gst_element_link (sep, id3demux));
-
- /* can't link id3demux and sink yet, do that later */
- g_signal_connect (id3demux, "pad-added", G_CALLBACK (pad_added_cb), pipeline);
+ fail_unless (gst_element_link (id3demux, sink));
path = g_build_filename (GST_TEST_FILES_PATH, file, NULL);
GST_LOG ("reading file '%s'", path);
check_unsync_v24 (const GstTagList * tags, const gchar * file)
{
const GValue *val;
+ GstSample *sample;
GstBuffer *buf;
gchar *album = NULL;
gchar *title = NULL;
gchar *artist = NULL;
+ GstMapInfo map;
fail_unless (gst_tag_list_get_string (tags, GST_TAG_TITLE, &title));
fail_unless (title != NULL);
val = gst_tag_list_get_value_index (tags, GST_TAG_IMAGE, 0);
fail_unless (val != NULL);
- fail_unless (GST_VALUE_HOLDS_BUFFER (val));
- buf = gst_value_get_buffer (val);
+ fail_unless (GST_VALUE_HOLDS_SAMPLE (val));
+ sample = gst_value_get_sample (val);
+ fail_unless (sample != NULL);
+ fail_unless (gst_sample_get_caps (sample) != NULL);
+ buf = gst_sample_get_buffer (sample);
fail_unless (buf != NULL);
- fail_unless (GST_BUFFER_CAPS (buf) != NULL);
- fail_unless_equals_int (GST_BUFFER_SIZE (buf), 38022);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ fail_unless_equals_int (map.size, 38022);
/* check for jpeg start/end markers */
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[0], 0xff);
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[1], 0xd8);
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[38020], 0xff);
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[38021], 0xd9);
+ fail_unless_equals_int (map.data[0], 0xff);
+ fail_unless_equals_int (map.data[1], 0xd8);
+ fail_unless_equals_int (map.data[38020], 0xff);
+ fail_unless_equals_int (map.data[38021], 0xd9);
+ gst_buffer_unmap (buf, &map);
}
GST_START_TEST (test_unsync_v24)
{
GstTagList *tags;
- tags = gst_tag_list_new ();
+ tags = gst_tag_list_new_empty ();
if (mask & (1 << 0)) {
gst_tag_list_add (tags, GST_TAG_MERGE_KEEP,
utf8_string_in_buf (GstBuffer * buf, const gchar * s)
{
gint i, len;
+ GstMapInfo map;
len = strlen (s);
- for (i = 0; i < (GST_BUFFER_SIZE (buf) - len); ++i) {
- if (memcmp (GST_BUFFER_DATA (buf) + i, s, len) == 0) {
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ for (i = 0; i < (map.size - len); ++i) {
+ if (memcmp (map.data + i, s, len) == 0) {
+ gst_buffer_unmap (buf, &map);
return TRUE;
}
}
+ gst_buffer_unmap (buf, &map);
return FALSE;
}
fill_mp3_buffer (GstElement * fakesrc, GstBuffer * buf, GstPad * pad,
guint64 * p_offset)
{
- fail_unless (GST_BUFFER_SIZE (buf) == MP3_FRAME_SIZE);
+ gsize size;
+
+ size = gst_buffer_get_size (buf);
+
+ fail_unless (size == MP3_FRAME_SIZE);
GST_LOG ("filling buffer with fake mp3 data, offset = %" G_GUINT64_FORMAT,
*p_offset);
- memcpy (GST_BUFFER_DATA (buf), mp3_dummyhdr, sizeof (mp3_dummyhdr));
+ gst_buffer_fill (buf, 0, mp3_dummyhdr, sizeof (mp3_dummyhdr));
+#if 0
/* can't use gst_buffer_set_caps() here because the metadata isn't writable
* because of the extra refcounts taken by the signal emission mechanism;
* we know it's fine to use GST_BUFFER_CAPS() here though */
GST_BUFFER_CAPS (buf) = gst_caps_new_simple ("audio/mpeg", "mpegversion",
G_TYPE_INT, 1, "layer", G_TYPE_INT, 3, NULL);
+#endif
GST_BUFFER_OFFSET (buf) = *p_offset;
- *p_offset += GST_BUFFER_SIZE (buf);
+ *p_offset += size;
}
static void
GstBuffer ** p_buf)
{
gint64 off;
- guint size;
+ GstMapInfo map;
off = GST_BUFFER_OFFSET (buf);
- size = GST_BUFFER_SIZE (buf);
- GST_LOG ("got buffer, size=%u, offset=%" G_GINT64_FORMAT, size, off);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ GST_LOG ("got buffer, size=%u, offset=%" G_GINT64_FORMAT, map.size, off);
fail_unless (GST_BUFFER_OFFSET_IS_VALID (buf));
- if (*p_buf == NULL || (off + size) > GST_BUFFER_SIZE (*p_buf)) {
+ if (*p_buf == NULL || (off + map.size) > gst_buffer_get_size (*p_buf)) {
GstBuffer *newbuf;
/* not very elegant, but who cares */
- newbuf = gst_buffer_new_and_alloc (off + size);
+ newbuf = gst_buffer_new_and_alloc (off + map.size);
if (*p_buf) {
- memcpy (GST_BUFFER_DATA (newbuf), GST_BUFFER_DATA (*p_buf),
- GST_BUFFER_SIZE (*p_buf));
+ GstMapInfo pmap;
+
+ gst_buffer_map (*p_buf, &pmap, GST_MAP_READ);
+ gst_buffer_fill (newbuf, 0, pmap.data, pmap.size);
+ gst_buffer_unmap (*p_buf, &pmap);
}
- memcpy (GST_BUFFER_DATA (newbuf) + off, GST_BUFFER_DATA (buf), size);
+ gst_buffer_fill (newbuf, off, map.data, map.size);
+
if (*p_buf)
gst_buffer_unref (*p_buf);
*p_buf = newbuf;
} else {
- memcpy (GST_BUFFER_DATA (*p_buf) + off, GST_BUFFER_DATA (buf), size);
+ gst_buffer_fill (*p_buf, off, map.data, map.size);
}
-}
-
-static void
-demux_pad_added (GstElement * id3demux, GstPad * srcpad, GstBuffer ** p_outbuf)
-{
- GstElement *fakesink, *pipeline;
-
- GST_LOG ("id3demux added source pad with caps %" GST_PTR_FORMAT,
- GST_PAD_CAPS (srcpad));
-
- pipeline = id3demux;
- while (GST_OBJECT_PARENT (pipeline) != NULL)
- pipeline = (GstElement *) GST_OBJECT_PARENT (pipeline);
-
- fakesink = gst_element_factory_make ("fakesink", "fakesink");
- g_assert (fakesink != NULL);
-
- /* set up sink */
- g_object_set (fakesink, "signal-handoffs", TRUE, NULL);
- g_signal_connect (fakesink, "handoff", G_CALLBACK (got_buffer), p_outbuf);
-
- gst_bin_add (GST_BIN (pipeline), fakesink);
- gst_element_set_state (fakesink, GST_STATE_PLAYING);
-
- fail_unless (gst_element_link (id3demux, fakesink));
+ gst_buffer_unmap (buf, &map);
}
static void
test_taglib_id3mux_check_output_buffer (GstBuffer * buf)
{
- guint8 *data = GST_BUFFER_DATA (buf);
- guint size = GST_BUFFER_SIZE (buf);
+ GstMapInfo map;
guint off;
- g_assert (size % MP3_FRAME_SIZE == 0);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ g_assert (map.size % MP3_FRAME_SIZE == 0);
- for (off = 0; off < size; off += MP3_FRAME_SIZE) {
- fail_unless (memcmp (data + off, mp3_dummyhdr, sizeof (mp3_dummyhdr)) == 0);
+ for (off = 0; off < map.size; off += MP3_FRAME_SIZE) {
+ fail_unless (memcmp (map.data + off, mp3_dummyhdr,
+ sizeof (mp3_dummyhdr)) == 0);
}
+ gst_buffer_unmap (buf, &map);
}
static void
{
GstMessage *msg;
GstTagList *tags_read = NULL;
- GstElement *pipeline, *id3mux, *id3demux, *fakesrc, *identity;
+ GstElement *pipeline, *id3mux, *id3demux, *fakesrc, *identity, *fakesink;
GstBus *bus;
guint64 offset;
GstBuffer *outbuf = NULL;
id3demux = gst_element_factory_make ("id3demux", "id3demux");
g_assert (id3demux != NULL);
+ fakesink = gst_element_factory_make ("fakesink", "fakesink");
+ g_assert (fakesink != NULL);
+
+ /* set up sink */
outbuf = NULL;
- g_signal_connect (id3demux, "pad-added",
- G_CALLBACK (demux_pad_added), &outbuf);
+ g_object_set (fakesink, "signal-handoffs", TRUE, NULL);
+ g_signal_connect (fakesink, "handoff", G_CALLBACK (got_buffer), &outbuf);
gst_bin_add (GST_BIN (pipeline), fakesrc);
gst_bin_add (GST_BIN (pipeline), id3mux);
gst_bin_add (GST_BIN (pipeline), identity);
gst_bin_add (GST_BIN (pipeline), id3demux);
+ gst_bin_add (GST_BIN (pipeline), fakesink);
gst_tag_setter_merge_tags (GST_TAG_SETTER (id3mux), tags,
GST_TAG_MERGE_APPEND);
- gst_element_link_many (fakesrc, id3mux, identity, id3demux, NULL);
+ gst_element_link_many (fakesrc, id3mux, identity, id3demux, fakesink, NULL);
/* set up source */
g_object_set (fakesrc, "signal-handoffs", TRUE, "can-activate-pull", FALSE,
GMainLoop *loop;
guint n_buffers = G_MAXUINT;
guint bus_watch = 0;
+ GstVideoInfo i1, i2;
- caps1 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
- caps2 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 0, 1, 1, 1);
+ gst_video_info_init (&i1);
+ gst_video_info_set_format (&i1, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i1.fps_n = 25;
+ i1.fps_d = 1;
+ caps1 = gst_video_info_to_caps (&i1);
+
+ gst_video_info_init (&i2);
+ gst_video_info_set_format (&i2, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ caps2 = gst_video_info_to_caps (&i2);
pipeline =
setup_imagefreeze (caps1, caps2, G_CALLBACK (sink_handoff_cb_0_1),
GMainLoop *loop;
guint n_buffers = G_MAXUINT;
guint bus_watch = 0;
+ GstVideoInfo i1, i2;
+
+ gst_video_info_init (&i1);
+ gst_video_info_set_format (&i1, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i1.fps_n = 25;
+ i1.fps_d = 1;
+ caps1 = gst_video_info_to_caps (&i1);
- caps1 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
- caps2 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
+ gst_video_info_init (&i2);
+ gst_video_info_set_format (&i2, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i2.fps_n = 25;
+ i2.fps_d = 1;
+ caps2 = gst_video_info_to_caps (&i2);
pipeline =
setup_imagefreeze (caps1, caps2,
GMainLoop *loop;
guint n_buffers = G_MAXUINT;
guint bus_watch = 0;
+ GstVideoInfo i1, i2;
+
+ gst_video_info_init (&i1);
+ gst_video_info_set_format (&i1, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i1.fps_n = 25;
+ i1.fps_d = 1;
+ caps1 = gst_video_info_to_caps (&i1);
- caps1 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
- caps2 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
+ gst_video_info_init (&i2);
+ gst_video_info_set_format (&i2, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i2.fps_n = 25;
+ i2.fps_d = 1;
+ caps2 = gst_video_info_to_caps (&i2);
pipeline =
setup_imagefreeze (caps1, caps2,
GMainLoop *loop;
guint n_buffers = G_MAXUINT;
guint bus_watch = 0;
+ GstVideoInfo i1, i2;
- caps1 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
- caps2 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
+ gst_video_info_init (&i1);
+ gst_video_info_set_format (&i1, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i1.fps_n = 25;
+ i1.fps_d = 1;
+ caps1 = gst_video_info_to_caps (&i1);
+
+ gst_video_info_init (&i2);
+ gst_video_info_set_format (&i2, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i2.fps_n = 25;
+ i2.fps_d = 1;
+ caps2 = gst_video_info_to_caps (&i2);
pipeline =
setup_imagefreeze (caps1, caps2,
GMainLoop *loop;
guint n_buffers = G_MAXUINT;
guint bus_watch = 0;
+ GstVideoInfo i1, i2;
+
+ gst_video_info_init (&i1);
+ gst_video_info_set_format (&i1, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i1.fps_n = 25;
+ i1.fps_d = 1;
+ caps1 = gst_video_info_to_caps (&i1);
- caps1 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
- caps2 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
+ gst_video_info_init (&i2);
+ gst_video_info_set_format (&i2, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i2.fps_n = 25;
+ i2.fps_d = 1;
+ caps2 = gst_video_info_to_caps (&i2);
pipeline =
setup_imagefreeze (caps1, caps2,
GST_END_TEST;
-static GstBuffer *test_buffer = NULL;
-
-static GstFlowReturn
-test_bufferalloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps,
- GstBuffer ** buf)
-{
- fail_if (test_buffer != NULL);
-
- test_buffer = gst_buffer_new_and_alloc (size);
- gst_buffer_set_caps (test_buffer, caps);
-
- *buf = gst_buffer_ref (test_buffer);
-
- return GST_FLOW_OK;
-}
-
-static void
-sink_handoff_cb_bufferalloc (GstElement * object, GstBuffer * buffer,
- GstPad * pad, gpointer user_data)
-{
- guint *n_buffers = (guint *) user_data;
-
- if (*n_buffers == G_MAXUINT)
- return;
-
- fail_unless (buffer->parent != NULL);
- fail_unless (test_buffer != NULL);
- fail_unless (buffer->parent == test_buffer);
-
- *n_buffers = *n_buffers + 1;
-}
-
-GST_START_TEST (test_imagefreeze_bufferalloc)
-{
- GstElement *pipeline;
- GstElement *sink;
- GstPad *sinkpad;
- GstCaps *caps1, *caps2;
- GstBus *bus;
- GMainLoop *loop;
- guint n_buffers = G_MAXUINT;
- guint bus_watch = 0;
-
- caps1 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
- caps2 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
-
- pipeline =
- setup_imagefreeze (caps1, caps2, G_CALLBACK (sink_handoff_cb_bufferalloc),
- &n_buffers);
-
- sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
- fail_unless (sink != NULL);
- sinkpad = gst_element_get_static_pad (sink, "sink");
- fail_unless (sinkpad != NULL);
- gst_pad_set_bufferalloc_function (sinkpad, test_bufferalloc);
- gst_object_unref (sinkpad);
- gst_object_unref (sink);
-
- loop = g_main_loop_new (NULL, TRUE);
- fail_unless (loop != NULL);
-
- bus = gst_element_get_bus (pipeline);
- fail_unless (bus != NULL);
- bus_watch = gst_bus_add_watch (bus, bus_handler, loop);
- gst_object_unref (bus);
-
- fail_unless_equals_int (gst_element_set_state (pipeline, GST_STATE_PAUSED),
- GST_STATE_CHANGE_SUCCESS);
-
- fail_unless (gst_element_seek (pipeline, 1.0, GST_FORMAT_TIME,
- GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET,
- 400 * GST_MSECOND));
-
- n_buffers = 0;
- fail_unless_equals_int (gst_element_set_state (pipeline, GST_STATE_PLAYING),
- GST_STATE_CHANGE_SUCCESS);
-
- g_main_loop_run (loop);
-
- fail_unless (test_buffer != NULL);
- fail_unless (n_buffers >= 1);
-
- gst_element_set_state (pipeline, GST_STATE_NULL);
-
- gst_buffer_unref (test_buffer);
- test_buffer = NULL;
-
- gst_object_unref (pipeline);
- g_main_loop_unref (loop);
- gst_caps_unref (caps1);
- gst_caps_unref (caps2);
- g_source_remove (bus_watch);
-}
-
-GST_END_TEST;
-
GST_START_TEST (test_imagefreeze_eos)
{
GstElement *pipeline;
GstFormat fmt = GST_FORMAT_TIME;
gint64 position;
guint bus_watch = 0;
+ GstVideoInfo i1, i2;
+
+ gst_video_info_init (&i1);
+ gst_video_info_set_format (&i1, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i1.fps_n = 25;
+ i1.fps_d = 1;
+ caps1 = gst_video_info_to_caps (&i1);
- caps1 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
- caps2 =
- gst_video_format_new_caps (GST_VIDEO_FORMAT_xRGB, 640, 480, 25, 1, 1, 1);
+ gst_video_info_init (&i2);
+ gst_video_info_set_format (&i2, GST_VIDEO_FORMAT_xRGB, 640, 480);
+ i2.fps_n = 25;
+ i2.fps_d = 1;
+ caps2 = gst_video_info_to_caps (&i2);
pipeline = setup_imagefreeze (caps1, caps2, NULL, NULL);
g_main_loop_run (loop);
- fail_unless (gst_element_query_position (src, &fmt, &position));
+ fail_unless (gst_element_query_position (src, fmt, &position));
fail_unless_equals_uint64 (position, 40 * GST_MSECOND);
gst_element_set_state (pipeline, GST_STATE_NULL);
tcase_add_test (tc_chain, test_imagefreeze_25_1_400ms_0ms);
tcase_add_test (tc_chain, test_imagefreeze_25_1_220ms_380ms);
- tcase_add_test (tc_chain, test_imagefreeze_bufferalloc);
tcase_add_test (tc_chain, test_imagefreeze_eos);
return s;
* Boston, MA 02111-1307, USA.
*/
+/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <gst/check/gstcheck.h>
-#include <gst/audio/multichannel.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/audio-enumtypes.h>
GST_START_TEST (test_create_and_unref)
{
interleave = gst_element_factory_make ("interleave", NULL);
fail_unless (interleave != NULL);
- pad1 = gst_element_get_request_pad (interleave, "sink%d");
+ pad1 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (pad1 != NULL);
- fail_unless_equals_string (GST_OBJECT_NAME (pad1), "sink0");
+ fail_unless_equals_string (GST_OBJECT_NAME (pad1), "sink_0");
- pad2 = gst_element_get_request_pad (interleave, "sink%d");
+ pad2 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (pad2 != NULL);
- fail_unless_equals_string (GST_OBJECT_NAME (pad2), "sink1");
+ fail_unless_equals_string (GST_OBJECT_NAME (pad2), "sink_1");
gst_element_release_request_pad (interleave, pad2);
gst_object_unref (pad2);
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "width = (int) 32, "
- "channels = (int) 2, "
- "rate = (int) 48000, " "endianness = (int) BYTE_ORDER"));
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (F32) ", "
+ "channels = (int) 2, " "rate = (int) 48000"));
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-float, "
- "width = (int) 32, "
- "channels = (int) 1, "
- "rate = (int) 48000, " "endianness = (int) BYTE_ORDER"));
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (F32) ", "
+ "channels = (int) 1, " "rate = (int) 48000"));
#define CAPS_48khz \
- "audio/x-raw-float, " \
- "width = (int) 32, " \
+ "audio/x-raw, " \
+ "format = (string) " GST_AUDIO_NE (F32) ", " \
"channels = (int) 1, " \
- "rate = (int) 48000, " \
- "endianness = (int) BYTE_ORDER"
+ "rate = (int) 48000"
static GstFlowReturn
-interleave_chain_func (GstPad * pad, GstBuffer * buffer)
+interleave_chain_func (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
+ GstMapInfo map;
gfloat *outdata;
gint i;
fail_unless (GST_IS_BUFFER (buffer));
- fail_unless_equals_int (GST_BUFFER_SIZE (buffer),
- 48000 * 2 * sizeof (gfloat));
- fail_unless (GST_BUFFER_DATA (buffer) != NULL);
-
- outdata = (gfloat *) GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ outdata = (gfloat *) map.data;
+ fail_unless_equals_int (map.size, 48000 * 2 * sizeof (gfloat));
+ fail_unless (outdata != NULL);
for (i = 0; i < 48000 * 2; i += 2) {
fail_unless_equals_float (outdata[i], input[0]);
fail_unless_equals_float (outdata[i + 1], input[1]);
}
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
have_data++;
- gst_buffer_unref (buffer);
-
return GST_FLOW_OK;
}
gint i;
GstBuffer *inbuf;
gfloat *indata;
+ GstMapInfo map;
mysrcpads = g_new0 (GstPad *, 2);
queue = gst_element_factory_make ("queue", "queue");
fail_unless (queue != NULL);
- sink0 = gst_element_get_request_pad (interleave, "sink%d");
+ sink0 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (sink0 != NULL);
- fail_unless_equals_string (GST_OBJECT_NAME (sink0), "sink0");
+ fail_unless_equals_string (GST_OBJECT_NAME (sink0), "sink_0");
- sink1 = gst_element_get_request_pad (interleave, "sink%d");
+ sink1 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (sink1 != NULL);
- fail_unless_equals_string (GST_OBJECT_NAME (sink1), "sink1");
+ fail_unless_equals_string (GST_OBJECT_NAME (sink1), "sink_1");
mysrcpads[0] = gst_pad_new_from_static_template (&srctemplate, "src0");
fail_unless (mysrcpads[0] != NULL);
input[0] = -1.0;
inbuf = gst_buffer_new_and_alloc (48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 48000; i++)
indata[i] = -1.0;
- gst_buffer_set_caps (inbuf, caps);
+ gst_buffer_unmap (inbuf, &map);
+ gst_pad_set_caps (mysrcpads[0], caps);
fail_unless (gst_pad_push (mysrcpads[0], inbuf) == GST_FLOW_OK);
input[1] = 1.0;
inbuf = gst_buffer_new_and_alloc (48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 48000; i++)
indata[i] = 1.0;
- gst_buffer_set_caps (inbuf, caps);
+ gst_buffer_unmap (inbuf, &map);
+ gst_pad_set_caps (mysrcpads[1], caps);
fail_unless (gst_pad_push (mysrcpads[1], inbuf) == GST_FLOW_OK);
inbuf = gst_buffer_new_and_alloc (48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 48000; i++)
indata[i] = -1.0;
- gst_buffer_set_caps (inbuf, caps);
+ gst_buffer_unmap (inbuf, &map);
fail_unless (gst_pad_push (mysrcpads[0], inbuf) == GST_FLOW_OK);
inbuf = gst_buffer_new_and_alloc (48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 48000; i++)
indata[i] = 1.0;
- gst_buffer_set_caps (inbuf, caps);
+ gst_buffer_unmap (inbuf, &map);
fail_unless (gst_pad_push (mysrcpads[1], inbuf) == GST_FLOW_OK);
fail_unless (have_data == 2);
gint i;
GstBuffer *inbuf;
gfloat *indata;
+ GstMapInfo map;
mysrcpads = g_new0 (GstPad *, 2);
queue = gst_element_factory_make ("queue", "queue");
fail_unless (queue != NULL);
- sink0 = gst_element_get_request_pad (interleave, "sink%d");
+ sink0 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (sink0 != NULL);
- fail_unless_equals_string (GST_OBJECT_NAME (sink0), "sink0");
+ fail_unless_equals_string (GST_OBJECT_NAME (sink0), "sink_0");
- sink1 = gst_element_get_request_pad (interleave, "sink%d");
+ sink1 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (sink1 != NULL);
- fail_unless_equals_string (GST_OBJECT_NAME (sink1), "sink1");
+ fail_unless_equals_string (GST_OBJECT_NAME (sink1), "sink_1");
mysrcpads[0] = gst_pad_new_from_static_template (&srctemplate, "src0");
fail_unless (mysrcpads[0] != NULL);
input[0] = -1.0;
inbuf = gst_buffer_new_and_alloc (48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 48000; i++)
indata[i] = -1.0;
- gst_buffer_set_caps (inbuf, caps);
+ gst_buffer_unmap (inbuf, &map);
+ gst_pad_set_caps (mysrcpads[0], caps);
fail_unless (gst_pad_push (mysrcpads[0], inbuf) == GST_FLOW_OK);
input[1] = 1.0;
inbuf = gst_buffer_new_and_alloc (48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 48000; i++)
indata[i] = 1.0;
- gst_buffer_set_caps (inbuf, caps);
+ gst_buffer_unmap (inbuf, &map);
+ gst_pad_set_caps (mysrcpads[1], caps);
fail_unless (gst_pad_push (mysrcpads[1], inbuf) == GST_FLOW_OK);
input[0] = 0.0;
input[1] = 1.0;
inbuf = gst_buffer_new_and_alloc (48000 * sizeof (gfloat));
- indata = (gfloat *) GST_BUFFER_DATA (inbuf);
+ gst_buffer_map (inbuf, &map, GST_MAP_WRITE);
+ indata = (gfloat *) map.data;
for (i = 0; i < 48000; i++)
indata[i] = 1.0;
- gst_buffer_set_caps (inbuf, caps);
+ gst_buffer_unmap (inbuf, &map);
fail_unless (gst_pad_push (mysrcpads[1], inbuf) == GST_FLOW_OK);
fail_unless (have_data == 2);
gpointer user_data)
{
gint n = GPOINTER_TO_INT (user_data);
- GstCaps *caps;
gfloat *data;
gint i;
+ gsize size;
- if (GST_PAD_CAPS (pad))
- caps = gst_caps_ref (GST_PAD_CAPS (pad));
- else {
- caps = gst_caps_new_simple ("audio/x-raw-float",
- "width", G_TYPE_INT, 32,
- "channels", G_TYPE_INT, 1,
- "rate", G_TYPE_INT, 48000, "endianness", G_TYPE_INT, G_BYTE_ORDER,
- NULL);
-
- if (n == 2) {
- GstAudioChannelPosition pos[1] =
- { GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT };
- gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
- } else if (n == 3) {
- GstAudioChannelPosition pos[1] =
- { GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT };
- gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
- }
- }
+ size = 48000 * sizeof (gfloat);
+ data = g_malloc (size);
+ for (i = 0; i < 48000; i++)
+ data[i] = (n % 2 == 0) ? -1.0 : 1.0;
- data = g_new (gfloat, 48000);
- GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) data;
- GST_BUFFER_DATA (buffer) = (guint8 *) data;
- GST_BUFFER_SIZE (buffer) = 48000 * sizeof (gfloat);
+ gst_buffer_take_memory (buffer, -1, gst_memory_new_wrapped (0, data, g_free,
+ size, 0, size));
GST_BUFFER_OFFSET (buffer) = GST_BUFFER_OFFSET_NONE;
GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET_END (buffer) = GST_BUFFER_OFFSET_NONE;
GST_BUFFER_DURATION (buffer) = GST_SECOND;
-
- GST_BUFFER_CAPS (buffer) = caps;
-
- for (i = 0; i < 48000; i++)
- data[i] = (n % 2 == 0) ? -1.0 : 1.0;
}
static void
gpointer user_data)
{
gint i;
+ GstMapInfo map;
gfloat *data;
- GstCaps *caps;
+ GstCaps *caps, *ccaps;
gint n = GPOINTER_TO_INT (user_data);
+ guint64 mask;
fail_unless (GST_IS_BUFFER (buffer));
- fail_unless_equals_int (GST_BUFFER_SIZE (buffer),
- 48000 * 2 * sizeof (gfloat));
- fail_unless_equals_int (GST_BUFFER_DURATION (buffer), GST_SECOND);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = (gfloat *) map.data;
- caps = gst_caps_new_simple ("audio/x-raw-float",
- "width", G_TYPE_INT, 32,
- "channels", G_TYPE_INT, 2,
- "rate", G_TYPE_INT, 48000, "endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
+ fail_unless_equals_int (map.size, 48000 * 2 * sizeof (gfloat));
+ fail_unless_equals_int (GST_BUFFER_DURATION (buffer), GST_SECOND);
if (n == 0) {
GstAudioChannelPosition pos[2] =
{ GST_AUDIO_CHANNEL_POSITION_NONE, GST_AUDIO_CHANNEL_POSITION_NONE };
- gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
+ gst_audio_channel_positions_to_mask (pos, 2, &mask);
} else if (n == 1) {
GstAudioChannelPosition pos[2] = { GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT
};
- gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
+ gst_audio_channel_positions_to_mask (pos, 2, &mask);
} else if (n == 2) {
GstAudioChannelPosition pos[2] = { GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
GST_AUDIO_CHANNEL_POSITION_REAR_CENTER
};
- gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
+ gst_audio_channel_positions_to_mask (pos, 2, &mask);
}
- fail_unless (gst_caps_is_equal (caps, GST_BUFFER_CAPS (buffer)));
- gst_caps_unref (caps);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (F32),
+ "channels", G_TYPE_INT, 2, "rate", G_TYPE_INT, 48000,
+ "channel-mask", GST_TYPE_BITMASK, &mask, NULL);
- data = (gfloat *) GST_BUFFER_DATA (buffer);
+ ccaps = gst_pad_get_current_caps (pad);
+ fail_unless (gst_caps_is_equal (caps, ccaps));
+ gst_caps_unref (ccaps);
+ gst_caps_unref (caps);
for (i = 0; i < 48000 * 2; i += 2) {
fail_unless_equals_float (data[i], -1.0);
fail_unless_equals_float (data[i + 1], 1.0);
}
+ gst_buffer_unmap (buffer, &map);
have_data++;
}
fail_unless (interleave != NULL);
gst_bin_add (GST_BIN (pipeline), gst_object_ref (interleave));
- sinkpad0 = gst_element_get_request_pad (interleave, "sink%d");
+ sinkpad0 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (sinkpad0 != NULL);
tmp = gst_element_get_static_pad (src1, "src");
fail_unless (gst_pad_link (tmp, sinkpad0) == GST_PAD_LINK_OK);
gst_object_unref (tmp);
- sinkpad1 = gst_element_get_request_pad (interleave, "sink%d");
+ sinkpad1 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (sinkpad1 != NULL);
tmp = gst_element_get_static_pad (src2, "src");
tmp2 = gst_element_get_static_pad (queue, "sink");
g_object_set (interleave, "channel-positions-from-input", TRUE, NULL);
gst_bin_add (GST_BIN (pipeline), gst_object_ref (interleave));
- sinkpad0 = gst_element_get_request_pad (interleave, "sink%d");
+ sinkpad0 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (sinkpad0 != NULL);
tmp = gst_element_get_static_pad (src1, "src");
fail_unless (gst_pad_link (tmp, sinkpad0) == GST_PAD_LINK_OK);
gst_object_unref (tmp);
- sinkpad1 = gst_element_get_request_pad (interleave, "sink%d");
+ sinkpad1 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (sinkpad1 != NULL);
tmp = gst_element_get_static_pad (src2, "src");
tmp2 = gst_element_get_static_pad (queue, "sink");
g_value_array_free (arr);
gst_bin_add (GST_BIN (pipeline), gst_object_ref (interleave));
- sinkpad0 = gst_element_get_request_pad (interleave, "sink%d");
+ sinkpad0 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (sinkpad0 != NULL);
tmp = gst_element_get_static_pad (src1, "src");
fail_unless (gst_pad_link (tmp, sinkpad0) == GST_PAD_LINK_OK);
gst_object_unref (tmp);
- sinkpad1 = gst_element_get_request_pad (interleave, "sink%d");
+ sinkpad1 = gst_element_get_request_pad (interleave, "sink_%u");
fail_unless (sinkpad1 != NULL);
tmp = gst_element_get_static_pad (src2, "src");
tmp2 = gst_element_get_static_pad (queue, "sink");
GST_DEBUG ("setup_jpegenc");
jpegenc = gst_check_setup_element ("jpegenc");
- mysinkpad = gst_check_setup_sink_pad (jpegenc, sinktemplate, NULL);
- mysrcpad = gst_check_setup_src_pad (jpegenc, &any_srctemplate, NULL);
+ mysinkpad = gst_check_setup_sink_pad (jpegenc, sinktemplate);
+ mysrcpad = gst_check_setup_src_pad (jpegenc, &any_srctemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstElement *pipeline;
GstElement *cf;
GstElement *sink;
+ GstSample *sample;
GstBuffer *buffer;
pipeline =
gst_element_set_state (pipeline, GST_STATE_PLAYING);
- buffer = gst_app_sink_pull_buffer (GST_APP_SINK (sink));
+ sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
gst_object_unref (sink);
gst_object_unref (cf);
+
+ buffer = gst_sample_get_buffer (sample);
+ gst_buffer_ref (buffer);
+
+ gst_sample_unref (sample);
+
return buffer;
}
jpegenc = setup_jpegenc (&any_sinktemplate);
sinkpad = gst_element_get_static_pad (jpegenc, "sink");
/* this should assert if non-subset */
- caps = gst_pad_get_caps (sinkpad);
+ caps = gst_pad_query_caps (sinkpad, NULL);
gst_caps_unref (caps);
gst_object_unref (sinkpad);
cleanup_jpegenc (jpegenc);
jpegenc = setup_jpegenc (&jpeg_sinktemplate);
sinkpad = gst_element_get_static_pad (jpegenc, "sink");
/* this should assert if non-subset */
- caps = gst_pad_get_caps (sinkpad);
+ caps = gst_pad_query_caps (sinkpad, NULL);
gst_caps_unref (caps);
gst_object_unref (sinkpad);
cleanup_jpegenc (jpegenc);
jpegenc = setup_jpegenc (&jpeg_restrictive_sinktemplate);
sinkpad = gst_element_get_static_pad (jpegenc, "sink");
/* this should assert if non-subset */
- caps = gst_pad_get_caps (sinkpad);
+ caps = gst_pad_query_caps (sinkpad, NULL);
structure = gst_caps_get_structure (caps, 0);
/* check the width */
gst_element_set_state (jpegenc, GST_STATE_PLAYING);
/* push first buffer with 800x600 resolution */
- caps = gst_caps_new_simple ("video/x-raw-yuv", "width", G_TYPE_INT,
+ caps = gst_caps_new_simple ("video/x-raw", "width", G_TYPE_INT,
800, "height", G_TYPE_INT, 600, "framerate",
- GST_TYPE_FRACTION, 1, 1, "format", GST_TYPE_FOURCC,
- GST_MAKE_FOURCC ('I', '4', '2', '0'), NULL);
- buffer = create_video_buffer (caps);
+ GST_TYPE_FRACTION, 1, 1, "format", G_TYPE_STRING, "I420", NULL);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
+ fail_unless ((buffer = create_video_buffer (caps)) != NULL);
gst_caps_unref (caps);
fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
allowed_caps = gst_pad_get_allowed_caps (mysrcpad);
/* the caps we want to negotiate to */
- caps = gst_caps_new_simple ("video/x-raw-yuv", "width", G_TYPE_INT,
+ caps = gst_caps_new_simple ("video/x-raw", "width", G_TYPE_INT,
640, "height", G_TYPE_INT, 480, "framerate",
- GST_TYPE_FRACTION, 1, 1, "format", GST_TYPE_FOURCC,
- GST_MAKE_FOURCC ('I', '4', '2', '0'), NULL);
+ GST_TYPE_FRACTION, 1, 1, "format", G_TYPE_STRING, "I420", NULL);
fail_unless (gst_caps_can_intersect (allowed_caps, caps));
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
/* push second buffer with 640x480 resolution */
buffer = create_video_buffer (caps);
#include <unistd.h>
#include <math.h>
+#include <gst/audio/audio.h>
#include <gst/check/gstcheck.h>
gboolean have_eos = FALSE;
GstPad *mysrcpad, *mysinkpad;
#define LEVEL_CAPS_TEMPLATE_STRING \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
+ "format = (string) { S8, "GST_AUDIO_NE(S16)" }, " \
+ "layout = (string) interleaved, " \
"rate = (int) [ 1, MAX ], " \
- "channels = (int) [ 1, 8 ], " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) {8, 16}, " \
- "depth = (int) {8, 16}, " \
- "signed = (boolean) true"
+ "channels = (int) [ 1, 8 ]"
#define LEVEL_CAPS_STRING \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
+ "format = (string) "GST_AUDIO_NE(S16)", " \
+ "layout = (string) interleaved, " \
"rate = (int) 1000, " \
- "channels = (int) 2, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "signed = (boolean) true"
-
+ "channels = (int) 2, " \
+ "channel-mask = (bitmask) 3" \
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_DEBUG ("setup_level");
level = gst_check_setup_element ("level");
- mysrcpad = gst_check_setup_src_pad (level, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (level, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (level, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (level, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstMessage *message;
const GstStructure *structure;
int i, j;
+ GstMapInfo map;
gint16 *data;
const GValue *list, *value;
GstClockTime endtime;
/* create a fake 0.1 sec buffer with a half-amplitude block signal */
inbuffer = gst_buffer_new_and_alloc (400);
- data = (gint16 *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ data = (gint16 *) map.data;
for (j = 0; j < 200; ++j) {
*data = 16536;
++data;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (LEVEL_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
const GstStructure *structure;
int j;
gint16 *data;
+ GstMapInfo map;
const GValue *list, *value;
GstClockTime endtime;
gdouble dB;
/* create a fake 0.1 sec buffer with a half-amplitude block signal */
inbuffer = gst_buffer_new_and_alloc (400);
- data = (gint16 *) GST_BUFFER_DATA (inbuffer);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ data = (gint16 *) map.data;
for (j = 0; j < 100; ++j) {
*data = 0;
++data;
*data = 16536;
++data;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (LEVEL_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
ASSERT_OBJECT_REFCOUNT (srcpad, "srcpad", 1);
gst_pad_set_active (srcpad, TRUE);
- if (!(sinkpad = gst_element_get_static_pad (element, "audio_%d")))
- sinkpad = gst_element_get_request_pad (element, "audio_%d");
+ if (!(sinkpad = gst_element_get_static_pad (element, "audio_%u")))
+ sinkpad = gst_element_get_request_pad (element, "audio_%u");
fail_if (sinkpad == NULL, "Could not get sink pad from %s",
GST_ELEMENT_NAME (element));
/* references are owned by: 1) us, 2) matroskamux, 3) collect pads */
setup_matroskamux (GstStaticPadTemplate * srctemplate)
{
GstElement *matroskamux;
+ GstSegment segment;
GST_DEBUG ("setup_matroskamux");
matroskamux = gst_check_setup_element ("matroskamux");
mysrcpad = setup_src_pad (matroskamux, srctemplate, NULL);
mysinkpad = setup_sink_pad (matroskamux, &sinktemplate, NULL);
+ fail_unless (gst_element_set_state (matroskamux,
+ GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
+ "could not set to playing");
+
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ fail_unless (gst_pad_push_event (mysrcpad,
+ gst_event_new_segment (&segment)), "Segment event rejected");
+
return matroskamux;
}
static void
check_buffer_data (GstBuffer * buffer, void *data, size_t data_size)
{
- fail_unless (GST_BUFFER_SIZE (buffer) == data_size);
- fail_unless (memcmp (data, GST_BUFFER_DATA (buffer), data_size) == 0);
+ fail_unless (gst_buffer_get_size (buffer) == data_size);
+ fail_unless (gst_buffer_memcmp (buffer, 0, data, data_size) == 0);
}
GST_START_TEST (test_ebml_header)
};
matroskamux = setup_matroskamux (&srcac3template);
- fail_unless (gst_element_set_state (matroskamux,
- GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
- "could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (1);
+ inbuffer = gst_buffer_new_allocate (NULL, 1, 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
num_buffers = g_list_length (buffers);
};
matroskamux = setup_matroskamux (&srcvorbistemplate);
- fail_unless (gst_element_set_state (matroskamux,
- GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
- "could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (1);
caps = gst_caps_from_string (VORBIS_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ inbuffer = gst_buffer_new_allocate (NULL, 1, 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
for (i = 0; i < num_buffers; ++i) {
gint j;
+ gsize buffer_size;
outbuffer = GST_BUFFER (buffers->data);
fail_if (outbuffer == NULL);
+ buffer_size = gst_buffer_get_size (outbuffer);
buffers = g_list_remove (buffers, outbuffer);
- if (!vorbis_header_found && GST_BUFFER_SIZE (outbuffer) >= sizeof (data)) {
- for (j = 0; j <= GST_BUFFER_SIZE (outbuffer) - sizeof (data); j++) {
- if (memcmp (GST_BUFFER_DATA (outbuffer) + j, data, sizeof (data)) == 0) {
+ if (!vorbis_header_found && buffer_size >= sizeof (data)) {
+ for (j = 0; j <= buffer_size - sizeof (data); j++) {
+ if (gst_buffer_memcmp (outbuffer, j, data, sizeof (data)) == 0) {
vorbis_header_found = TRUE;
break;
}
{
GstElement *matroskamux;
GstBuffer *inbuffer, *outbuffer;
+ guint8 *indata;
GstCaps *caps;
int num_buffers;
int i;
guint8 data1[] = { 0x42 };
matroskamux = setup_matroskamux (&srcac3template);
- fail_unless (gst_element_set_state (matroskamux,
- GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
- "could not set to playing");
- /* Generate the header */
- inbuffer = gst_buffer_new_and_alloc (1);
- GST_BUFFER_TIMESTAMP (inbuffer) = 0;
caps = gst_caps_from_string (AC3_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
+
+ /* Generate the header */
+ inbuffer = gst_buffer_new_allocate (NULL, 1, 0);
+ GST_BUFFER_TIMESTAMP (inbuffer) = 0;
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless_equals_int (gst_pad_push (mysrcpad, inbuffer), GST_FLOW_OK);
buffers = NULL;
/* Now push a buffer */
- inbuffer = gst_buffer_new_and_alloc (1);
- GST_BUFFER_DATA (inbuffer)[0] = 0x42;
+ indata = g_malloc (1);
+ inbuffer = gst_buffer_new_wrapped (indata, 1);
+ indata[0] = 0x42;
GST_BUFFER_TIMESTAMP (inbuffer) = 1000000;
- caps = gst_caps_from_string (AC3_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
- gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
int i;
matroskamux = setup_matroskamux (&srcac3template);
- fail_unless (gst_element_set_state (matroskamux,
- GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
- "could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (1);
+ inbuffer = gst_buffer_new_allocate (NULL, 1, 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
num_buffers = g_list_length (buffers);
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
- inbuffer = gst_buffer_new_and_alloc (1);
+ inbuffer = gst_buffer_new_allocate (NULL, 1, 0);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
num_buffers = g_list_length (buffers);
gst_element_set_state (pipeline, GST_STATE_NULL);
}
-#if !GLIB_CHECK_VERSION(2,30,0)
-static gchar *
-g_mkdtemp (gchar * template)
-{
- gchar *tmpdir;
-
- tmpdir = mkdtemp (template);
- if (tmpdir == NULL) {
- g_free (template);
- }
- return tmpdir;
-}
-#endif
-
GST_START_TEST (test_multifilesink_key_frame)
{
GstElement *pipeline;
pipeline =
gst_parse_launch
- ("videotestsrc num-buffers=10 ! video/x-raw-yuv,format=(fourcc)I420,width=320,height=240 ! multifilesink name=mfs",
+ ("videotestsrc num-buffers=10 ! video/x-raw,format=(string)I420,width=320,height=240 ! multifilesink name=mfs",
NULL);
fail_if (pipeline == NULL);
mfs = gst_bin_get_by_name (GST_BIN (pipeline), "mfs");
pipeline =
gst_parse_launch
- ("videotestsrc num-buffers=10 ! video/x-raw-yuv,format=(fourcc)I420,width=320,height=240 ! multifilesink name=mfs",
+ ("videotestsrc num-buffers=10 ! video/x-raw,format=(string)I420,width=320,height=240 ! multifilesink name=mfs",
NULL);
fail_if (pipeline == NULL);
mfs = gst_bin_get_by_name (GST_BIN (pipeline), "mfs");
sink = gst_element_get_static_pad (mfs, "sink");
buf = gst_buffer_new_and_alloc (4);
- memcpy (GST_BUFFER_DATA (buf), "foo", 4);
- fail_if (gst_pad_chain (sink, gst_buffer_ref (buf)) != GST_FLOW_OK);
+ gst_buffer_fill (buf, 0, "foo", 4);
+ fail_if (gst_pad_chain (sink, gst_buffer_copy (buf)) != GST_FLOW_OK);
- memcpy (GST_BUFFER_DATA (buf), "bar", 4);
- fail_if (gst_pad_chain (sink, gst_buffer_ref (buf)) != GST_FLOW_OK);
+ gst_buffer_fill (buf, 0, "bar", 4);
+ fail_if (gst_pad_chain (sink, gst_buffer_copy (buf)) != GST_FLOW_OK);
fail_unless (gst_pad_send_event (sink,
gst_video_event_new_downstream_force_key_unit (GST_CLOCK_TIME_NONE,
GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, TRUE, 1)));
- memcpy (GST_BUFFER_DATA (buf), "baz", 4);
+ gst_buffer_fill (buf, 0, "baz", 4);
fail_if (gst_pad_chain (sink, buf) != GST_FLOW_OK);
fail_if (gst_element_set_state (mfs,
pipeline =
gst_parse_launch
- ("videotestsrc num-buffers=10 ! video/x-raw-yuv,format=(fourcc)I420,width=320,height=240 ! multifilesink name=mfs",
+ ("videotestsrc num-buffers=10 ! video/x-raw,format=(string)I420,width=320,height=240 ! multifilesink name=mfs",
NULL);
fail_if (pipeline == NULL);
mfs = gst_bin_get_by_name (GST_BIN (pipeline), "mfs");
pipeline =
gst_parse_launch
- ("multifilesrc ! video/x-raw-yuv,format=(fourcc)I420,width=320,height=240,framerate=10/1 ! fakesink",
+ ("multifilesrc ! video/x-raw,format=(string)I420,width=320,height=240,framerate=10/1 ! fakesink",
NULL);
fail_if (pipeline == NULL);
mfs = gst_bin_get_by_name (GST_BIN (pipeline), "multifilesrc0");
buffer = gst_buffer_new_and_alloc (size);
if (buffer_data) {
- memcpy (GST_BUFFER_DATA (buffer), buffer_data, size);
+ gst_buffer_fill (buffer, 0, buffer_data, size);
} else {
guint i;
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
/* Create a recognizable pattern (loop 0x00 -> 0xff) in the data block */
- for (i = 0; i < size; i++) {
- GST_BUFFER_DATA (buffer)[i] = i % 0x100;
+ for (i = 0; i < map.size; i++) {
+ map.data[i] = i % 0x100;
}
+ gst_buffer_unmap (buffer, &map);
}
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (srcpad));
+ /* gst_buffer_set_caps (buffer, GST_PAD_CAPS (srcpad)); */
GST_BUFFER_OFFSET (buffer) = dataoffset;
dataoffset += size;
return buffer;
buffer_count_size (void *buffer, void *user_data)
{
guint *sum = (guint *) user_data;
- *sum += GST_BUFFER_SIZE (buffer);
+ *sum += gst_buffer_get_size (buffer);
}
/*
return;
}
- fail_unless (GST_BUFFER_SIZE (buffer) == vdata->data_to_verify_size);
- fail_unless (memcmp (GST_BUFFER_DATA (buffer), vdata->data_to_verify,
+ fail_unless (gst_buffer_get_size (buffer) == vdata->data_to_verify_size);
+ fail_unless (gst_buffer_memcmp (buffer, 0, vdata->data_to_verify,
vdata->data_to_verify_size) == 0);
if (vdata->buffers_before_offset_skip) {
fail_unless (GST_BUFFER_OFFSET (buffer) == offset_counter);
}
- if (vdata->caps) {
- GST_LOG ("%" GST_PTR_FORMAT " = %" GST_PTR_FORMAT " ?",
- GST_BUFFER_CAPS (buffer), vdata->caps);
- fail_unless (gst_caps_is_equal (GST_BUFFER_CAPS (buffer), vdata->caps));
- }
-
ts_counter += GST_BUFFER_DURATION (buffer);
- offset_counter += GST_BUFFER_SIZE (buffer);
+ offset_counter += gst_buffer_get_size (buffer);
buffer_counter++;
}
GstBus *bus;
element = gst_check_setup_element (factory);
- srcpad = gst_check_setup_src_pad (element, src_template, src_caps);
- sinkpad = gst_check_setup_sink_pad (element, sink_template, sink_caps);
+ srcpad = gst_check_setup_src_pad (element, src_template);
+ sinkpad = gst_check_setup_sink_pad (element, sink_template);
gst_pad_set_active (srcpad, TRUE);
gst_pad_set_active (sinkpad, TRUE);
+ if (src_caps)
+ fail_unless (gst_pad_set_caps (srcpad, src_caps));
+ if (sink_caps)
+ fail_unless (gst_pad_set_caps (sinkpad, sink_caps));
bus = gst_bus_new ();
gst_element_set_bus (element, bus);
if (!k)
buffer = buffer_new (test->series[j].data, test->series[j].size);
else {
- GstCaps *caps = gst_buffer_get_caps (buffer);
-
buffer = gst_buffer_join (buffer,
buffer_new (test->series[j].data, test->series[j].size));
- if (caps) {
- gst_buffer_set_caps (buffer, caps);
- gst_caps_unref (caps);
- }
}
}
fail_unless_equals_int (gst_pad_push (srcpad, buffer), GST_FLOW_OK);
fail_unless_equals_int (datasum, size);
}
- src_caps = gst_pad_get_negotiated_caps (sinkpad);
+ src_caps = gst_pad_get_current_caps (sinkpad);
GST_LOG ("output caps: %" GST_PTR_FORMAT, src_caps);
if (test->sink_caps) {
GST_DEBUG ("setup_qtmux");
qtmux = gst_check_setup_element ("qtmux");
mysrcpad = setup_src_pad (qtmux, srctemplate, NULL, sinkname);
- mysinkpad = gst_check_setup_sink_pad (qtmux, &sinktemplate, NULL);
+ mysinkpad = gst_check_setup_sink_pad (qtmux, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
inbuffer = gst_buffer_new_and_alloc (1);
caps = gst_caps_copy (gst_pad_get_pad_template_caps (mysrcpad));
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
GST_BUFFER_DURATION (inbuffer) = 40 * GST_MSECOND;
case 0:
{
/* ftyp header */
- guint8 *data = GST_BUFFER_DATA (outbuffer);
-
- fail_unless (GST_BUFFER_SIZE (outbuffer) >= 20);
- fail_unless (memcmp (data, data0, sizeof (data0)) == 0);
- fail_unless (memcmp (data + 16, data0 + 8, 4) == 0);
+ fail_unless (gst_buffer_get_size (outbuffer) >= 20);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, data0,
+ sizeof (data0)) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 16, data0 + 8, 4) == 0);
break;
}
case 1: /* mdat header */
- fail_unless (GST_BUFFER_SIZE (outbuffer) == 16);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer), data1, sizeof (data1))
+ fail_unless (gst_buffer_get_size (outbuffer) == 16);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, data1, sizeof (data1))
== 0);
break;
case 2: /* buffer we put in */
- fail_unless (GST_BUFFER_SIZE (outbuffer) == 1);
+ fail_unless (gst_buffer_get_size (outbuffer) == 1);
break;
case 3: /* moov */
- fail_unless (GST_BUFFER_SIZE (outbuffer) > 8);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer) + 4, data2,
+ fail_unless (gst_buffer_get_size (outbuffer) > 8);
+ fail_unless (gst_buffer_memcmp (outbuffer, 4, data2,
sizeof (data2)) == 0);
break;
default:
inbuffer = gst_buffer_new_and_alloc (1);
caps = gst_caps_copy (gst_pad_get_pad_template_caps (mysrcpad));
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
GST_BUFFER_DURATION (inbuffer) = 40 * GST_MSECOND;
case 0:
{
/* ftyp header */
- guint8 *data = GST_BUFFER_DATA (outbuffer);
-
- fail_unless (GST_BUFFER_SIZE (outbuffer) >= 20);
- fail_unless (memcmp (data, data0, sizeof (data0)) == 0);
- fail_unless (memcmp (data + 16, data0 + 8, 4) == 0);
+ fail_unless (gst_buffer_get_size (outbuffer) >= 20);
+ fail_unless (gst_buffer_memcmp (outbuffer, 0, data0,
+ sizeof (data0)) == 0);
+ fail_unless (gst_buffer_memcmp (outbuffer, 16, data0 + 8, 4) == 0);
break;
}
case 1: /* moov */
- fail_unless (GST_BUFFER_SIZE (outbuffer) > 8);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer) + 4, data2,
+ fail_unless (gst_buffer_get_size (outbuffer) > 8);
+ fail_unless (gst_buffer_memcmp (outbuffer, 4, data2,
sizeof (data2)) == 0);
break;
case 2: /* moof */
- fail_unless (GST_BUFFER_SIZE (outbuffer) > 8);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer) + 4, data3,
+ fail_unless (gst_buffer_get_size (outbuffer) > 8);
+ fail_unless (gst_buffer_memcmp (outbuffer, 4, data3,
sizeof (data3)) == 0);
break;
case 3: /* mdat header */
- fail_unless (GST_BUFFER_SIZE (outbuffer) == 8);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer) + 4, data1,
+ fail_unless (gst_buffer_get_size (outbuffer) == 8);
+ fail_unless (gst_buffer_memcmp (outbuffer, 4, data1,
sizeof (data1)) == 0);
break;
case 4: /* buffer we put in */
- fail_unless (GST_BUFFER_SIZE (outbuffer) == 1);
+ fail_unless (gst_buffer_get_size (outbuffer) == 1);
break;
case 5: /* mfra */
- fail_unless (GST_BUFFER_SIZE (outbuffer) > 8);
- fail_unless (memcmp (GST_BUFFER_DATA (outbuffer) + 4, data4,
+ fail_unless (gst_buffer_get_size (outbuffer) > 8);
+ fail_unless (gst_buffer_memcmp (outbuffer, 4, data4,
sizeof (data4)) == 0);
break;
default:
GST_START_TEST (test_video_pad_dd)
{
- check_qtmux_pad (&srcvideotemplate, "video_%d", 0);
+ check_qtmux_pad (&srcvideotemplate, "video_%u", 0);
}
GST_END_TEST;
GST_START_TEST (test_audio_pad_dd)
{
- check_qtmux_pad (&srcaudiotemplate, "audio_%d", 0);
+ check_qtmux_pad (&srcaudiotemplate, "audio_%u", 0);
}
GST_END_TEST;
GST_START_TEST (test_video_pad_frag_dd)
{
- check_qtmux_pad_fragmented (&srcvideotemplate, "video_%d", 0, FALSE);
+ check_qtmux_pad_fragmented (&srcvideotemplate, "video_%u", 0, FALSE);
}
GST_END_TEST;
GST_START_TEST (test_audio_pad_frag_dd)
{
- check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%d", 0, FALSE);
+ check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%u", 0, FALSE);
}
GST_END_TEST;
GST_START_TEST (test_video_pad_frag_dd_streamable)
{
- check_qtmux_pad_fragmented (&srcvideotemplate, "video_%d", 0, TRUE);
+ check_qtmux_pad_fragmented (&srcvideotemplate, "video_%u", 0, TRUE);
}
GST_END_TEST;
GST_START_TEST (test_audio_pad_frag_dd_streamable)
{
- check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%d", 0, TRUE);
+ check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%u", 0, TRUE);
}
GST_END_TEST;
GST_START_TEST (test_video_pad_reorder)
{
- check_qtmux_pad (&srcvideotemplate, "video_%d", 1);
+ check_qtmux_pad (&srcvideotemplate, "video_%u", 1);
}
GST_END_TEST;
GST_START_TEST (test_audio_pad_reorder)
{
- check_qtmux_pad (&srcaudiotemplate, "audio_%d", 1);
+ check_qtmux_pad (&srcaudiotemplate, "audio_%u", 1);
}
GST_END_TEST;
GST_START_TEST (test_video_pad_frag_reorder)
{
- check_qtmux_pad_fragmented (&srcvideotemplate, "video_%d", 1, FALSE);
+ check_qtmux_pad_fragmented (&srcvideotemplate, "video_%u", 1, FALSE);
}
GST_END_TEST;
GST_START_TEST (test_audio_pad_frag_reorder)
{
- check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%d", 1, FALSE);
+ check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%u", 1, FALSE);
}
GST_END_TEST;
GST_START_TEST (test_video_pad_frag_reorder_streamable)
{
- check_qtmux_pad_fragmented (&srcvideotemplate, "video_%d", 1, TRUE);
+ check_qtmux_pad_fragmented (&srcvideotemplate, "video_%u", 1, TRUE);
}
GST_END_TEST;
GST_START_TEST (test_audio_pad_frag_reorder_streamable)
{
- check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%d", 1, TRUE);
+ check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%u", 1, TRUE);
}
GST_END_TEST;
GST_START_TEST (test_video_pad_asc)
{
- check_qtmux_pad (&srcvideotemplate, "video_%d", 2);
+ check_qtmux_pad (&srcvideotemplate, "video_%u", 2);
}
GST_END_TEST;
GST_START_TEST (test_audio_pad_asc)
{
- check_qtmux_pad (&srcaudiotemplate, "audio_%d", 2);
+ check_qtmux_pad (&srcaudiotemplate, "audio_%u", 2);
}
GST_END_TEST;
GST_START_TEST (test_video_pad_frag_asc)
{
- check_qtmux_pad_fragmented (&srcvideotemplate, "video_%d", 2, FALSE);
+ check_qtmux_pad_fragmented (&srcvideotemplate, "video_%u", 2, FALSE);
}
GST_END_TEST;
GST_START_TEST (test_audio_pad_frag_asc)
{
- check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%d", 2, FALSE);
+ check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%u", 2, FALSE);
}
GST_END_TEST;
GST_START_TEST (test_video_pad_frag_asc_streamable)
{
- check_qtmux_pad_fragmented (&srcvideotemplate, "video_%d", 2, TRUE);
+ check_qtmux_pad_fragmented (&srcvideotemplate, "video_%u", 2, TRUE);
}
GST_END_TEST;
GST_START_TEST (test_audio_pad_frag_asc_streamable)
{
- check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%d", 2, TRUE);
+ check_qtmux_pad_fragmented (&srcaudiotemplate, "audio_%u", 2, TRUE);
}
GST_END_TEST;
GST_START_TEST (test_reuse)
{
- GstElement *qtmux = setup_qtmux (&srcvideotemplate, "video_%d");
+ GstElement *qtmux = setup_qtmux (&srcvideotemplate, "video_%u");
GstBuffer *inbuffer;
GstCaps *caps;
inbuffer = gst_buffer_new_and_alloc (1);
fail_unless (inbuffer != NULL);
caps = gst_caps_copy (gst_pad_get_pad_template_caps (mysrcpad));
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
GST_BUFFER_DURATION (inbuffer) = 40 * GST_MSECOND;
/* send eos to have all written */
fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()) == TRUE);
- cleanup_qtmux (qtmux, "video_%d");
+ cleanup_qtmux (qtmux, "video_%u");
}
GST_END_TEST;
GstCaps *caps;
if (variant == NULL) {
- caps = gst_caps_new_simple ("video/quicktime", NULL);
+ caps = gst_caps_new_empty_simple ("video/quicktime");
} else {
caps = gst_caps_new_simple ("video/quicktime",
"variant", G_TYPE_STRING, variant, NULL);
cprof = gst_encoding_container_profile_new ("Name", "blah", caps, NULL);
gst_caps_unref (caps);
- caps = gst_caps_new_simple ("audio/x-raw-int", "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16, "endianness", G_TYPE_INT, 4321,
- "channels", G_TYPE_INT, 2, "rate", G_TYPE_INT, 44100,
- "signed", G_TYPE_BOOLEAN, TRUE, NULL);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "S16BE",
+ "channels", G_TYPE_INT, 2, "rate", G_TYPE_INT, 44100, NULL);
gst_encoding_container_profile_add_profile (cprof,
GST_ENCODING_PROFILE (gst_encoding_audio_profile_new (caps, NULL, NULL,
1)));
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int")
+ GST_STATIC_CAPS ("audio/x-raw")
);
static GType test_mp3_enc_get_type (void);
-GST_BOILERPLATE (TestMp3Enc, test_mp3_enc, GstElement, GST_TYPE_ELEMENT);
+G_DEFINE_TYPE (TestMp3Enc, test_mp3_enc, GST_TYPE_ELEMENT);
static void
-test_mp3_enc_base_init (gpointer klass)
+test_mp3_enc_class_init (TestMp3EncClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- gst_element_class_add_static_pad_template (element_class, &sink_template);
- gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
gst_element_class_set_details_simple (element_class, "MPEG1 Audio Encoder",
"Codec/Encoder/Audio", "Pretends to encode mp3", "Foo Bar <foo@bar.com>");
}
static void
-test_mp3_enc_class_init (TestMp3EncClass * klass)
-{
- /* doesn't actually need to do anything for this test */
-}
-
-static void
-test_mp3_enc_init (TestMp3Enc * mp3enc, TestMp3EncClass * klass)
+test_mp3_enc_init (TestMp3Enc * mp3enc)
{
GstPad *pad;
"fakemp3enc", "fakemp3enc", plugin_init, VERSION, "LGPL",
"gst-plugins-good", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
- feature = gst_default_registry_find_feature ("testmp3enc",
+ feature = gst_registry_find_feature (gst_registry_get (), "testmp3enc",
GST_TYPE_ELEMENT_FACTORY);
gst_plugin_feature_set_rank (feature, GST_RANK_PRIMARY + 100);
/* make sure we got mp4mux for variant=iso */
GST_INFO ("muxer: %s", G_OBJECT_TYPE_NAME (mux));
- fail_unless_equals_string (GST_PLUGIN_FEATURE_NAME (f), "mp4mux");
+ fail_unless_equals_string (GST_OBJECT_NAME (f), "mp4mux");
}
gst_object_unref (mux);
gst_object_unref (enc);
!= GST_STATE_CHANGE_FAILURE);
if (*taglist == NULL) {
- *taglist = gst_tag_list_new ();
+ *taglist = gst_tag_list_new_empty ();
}
while (1) {
for (i = 0; i < 3; i++) {
inbuffer = gst_buffer_new_and_alloc (bytes[i]);
caps = gst_caps_copy (gst_pad_get_pad_template_caps (mysrcpad));
- gst_buffer_set_caps (inbuffer, caps);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
GST_BUFFER_TIMESTAMP (inbuffer) = total_duration;
GST_BUFFER_DURATION (inbuffer) = (GstClockTime) durations[i];
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
- total_bytes += GST_BUFFER_SIZE (inbuffer);
+ total_bytes += gst_buffer_get_size (inbuffer);
total_duration += GST_BUFFER_DURATION (inbuffer);
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
}
GST_START_TEST (test_average_bitrate)
{
- test_average_bitrate_custom ("mp4mux", &srcaudioaactemplate, "audio_%d");
- test_average_bitrate_custom ("mp4mux", &srcvideoh264template, "video_%d");
+ test_average_bitrate_custom ("mp4mux", &srcaudioaactemplate, "audio_%u");
+ test_average_bitrate_custom ("mp4mux", &srcvideoh264template, "video_%u");
- test_average_bitrate_custom ("qtmux", &srcaudioaactemplate, "audio_%d");
- test_average_bitrate_custom ("qtmux", &srcvideoh264template, "video_%d");
+ test_average_bitrate_custom ("qtmux", &srcaudioaactemplate, "audio_%u");
+ test_average_bitrate_custom ("qtmux", &srcvideoh264template, "video_%u");
}
GST_END_TEST;
*/
#include <gst/check/gstcheck.h>
+#include <gst/audio/audio.h>
/* For ease of programming we use globals to keep refs for our floating src and
* sink pads we create; otherwise we always have to do get_pad, get_peer, and
"rate = (int) { 8000, 11025, 12000, 16000, 22050, " \
"24000, 32000, 44100, 48000 }"
-#define RG_ANALYSIS_CAPS_TEMPLATE_STRING \
- "audio/x-raw-float, " \
- "width = (int) 32, " \
- "endianness = (int) BYTE_ORDER, " \
- REPLAY_GAIN_CAPS \
- "; " \
- "audio/x-raw-int, " \
- "width = (int) 16, " \
- "depth = (int) [ 1, 16 ], " \
- "signed = (boolean) true, " \
- "endianness = (int) BYTE_ORDER, " \
+#define RG_ANALYSIS_CAPS_TEMPLATE_STRING \
+ "audio/x-raw, " \
+ "format = (string) "GST_AUDIO_NE (F32) ", " \
+ "layout = (string) interleaved, " \
+ REPLAY_GAIN_CAPS \
+ "; " \
+ "audio/x-raw, " \
+ "format = (string) "GST_AUDIO_NE (S16) ", " \
+ "layout = (string) interleaved, " \
REPLAY_GAIN_CAPS
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_DEBUG ("setup_rganalysis");
analysis = gst_check_setup_element ("rganalysis");
- mysrcpad = gst_check_setup_src_pad (analysis, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (analysis, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (analysis, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (analysis, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
test_buffer_const_float_mono (gint sample_rate, gsize n_frames, gfloat value)
{
GstBuffer *buf = gst_buffer_new_and_alloc (n_frames * sizeof (gfloat));
- gfloat *data = (gfloat *) GST_BUFFER_DATA (buf);
+ GstMapInfo map;
+ gfloat *data;
GstCaps *caps;
gint i;
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = (gfloat *) map.data;
for (i = n_frames; i--;)
*data++ = value;
+ gst_buffer_unmap (buf, &map);
- caps = gst_caps_new_simple ("audio/x-raw-float",
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (F32),
"rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 1,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, "width", G_TYPE_INT, 32, NULL);
- gst_buffer_set_caps (buf, caps);
+ "layout", G_TYPE_STRING, "interleaved", NULL);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (buf, "buf", 1);
gfloat value_l, gfloat value_r)
{
GstBuffer *buf = gst_buffer_new_and_alloc (n_frames * sizeof (gfloat) * 2);
- gfloat *data = (gfloat *) GST_BUFFER_DATA (buf);
+ GstMapInfo map;
+ gfloat *data;
GstCaps *caps;
gint i;
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = (gfloat *) map.data;
for (i = n_frames; i--;) {
*data++ = value_l;
*data++ = value_r;
}
-
- caps = gst_caps_new_simple ("audio/x-raw-float",
- "rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 2,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, "width", G_TYPE_INT, 32, NULL);
- gst_buffer_set_caps (buf, caps);
+ gst_buffer_unmap (buf, &map);
+
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (F32),
+ "layout", G_TYPE_STRING, "interleaved",
+ "rate", G_TYPE_INT, sample_rate,
+ "channels", G_TYPE_INT, 2,
+ "channel-mask", GST_TYPE_BITMASK, (gint64) 0x3, NULL);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (buf, "buf", 1);
gint16 value)
{
GstBuffer *buf = gst_buffer_new_and_alloc (n_frames * sizeof (gint16));
- gint16 *data = (gint16 *) GST_BUFFER_DATA (buf);
+ gint16 *data;
+ GstMapInfo map;
GstCaps *caps;
gint i;
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = (gint16 *) map.data;
for (i = n_frames; i--;)
*data++ = value;
+ gst_buffer_unmap (buf, &map);
- caps = gst_caps_new_simple ("audio/x-raw-int",
- "rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 1,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, "signed", G_TYPE_BOOLEAN, TRUE,
- "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, depth, NULL);
- gst_buffer_set_caps (buf, caps);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (S16),
+ "layout", G_TYPE_STRING, "interleaved",
+ "rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 1, NULL);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (buf, "buf", 1);
gint16 value_l, gint16 value_r)
{
GstBuffer *buf = gst_buffer_new_and_alloc (n_frames * sizeof (gint16) * 2);
- gint16 *data = (gint16 *) GST_BUFFER_DATA (buf);
+ gint16 *data;
+ GstMapInfo map;
GstCaps *caps;
gint i;
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = (gint16 *) map.data;
for (i = n_frames; i--;) {
*data++ = value_l;
*data++ = value_r;
}
+ gst_buffer_unmap (buf, &map);
- caps = gst_caps_new_simple ("audio/x-raw-int",
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (S16),
+ "layout", G_TYPE_STRING, "interleaved",
"rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 2,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, "signed", G_TYPE_BOOLEAN, TRUE,
- "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, depth, NULL);
- gst_buffer_set_caps (buf, caps);
+ "channel-mask", GST_TYPE_BITMASK, (gint64) 0x3, NULL);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (buf, "buf", 1);
gsize n_frames, gfloat value)
{
GstBuffer *buf = gst_buffer_new_and_alloc (n_frames * sizeof (gfloat));
- gfloat *data = (gfloat *) GST_BUFFER_DATA (buf);
+ gfloat *data;
+ GstMapInfo map;
GstCaps *caps;
gint i;
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = (gfloat *) map.data;
for (i = n_frames; i--;) {
*accumulator += 1;
*accumulator %= 96;
else
*data++ = -value;
}
+ gst_buffer_unmap (buf, &map);
- caps = gst_caps_new_simple ("audio/x-raw-float",
- "rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 1,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, "width", G_TYPE_INT, 32, NULL);
- gst_buffer_set_caps (buf, caps);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (F32),
+ "layout", G_TYPE_STRING, "interleaved",
+ "rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 1, NULL);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (buf, "buf", 1);
gsize n_frames, gfloat value_l, gfloat value_r)
{
GstBuffer *buf = gst_buffer_new_and_alloc (n_frames * sizeof (gfloat) * 2);
- gfloat *data = (gfloat *) GST_BUFFER_DATA (buf);
+ gfloat *data;
+ GstMapInfo map;
GstCaps *caps;
gint i;
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = (gfloat *) map.data;
for (i = n_frames; i--;) {
*accumulator += 1;
*accumulator %= 96;
*data++ = -value_r;
}
}
+ gst_buffer_unmap (buf, &map);
- caps = gst_caps_new_simple ("audio/x-raw-float",
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (F32),
+ "layout", G_TYPE_STRING, "interleaved",
"rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 2,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, "width", G_TYPE_INT, 32, NULL);
- gst_buffer_set_caps (buf, caps);
+ "channel-mask", GST_TYPE_BITMASK, (gint64) 0x3, NULL);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (buf, "buf", 1);
gint depth, gsize n_frames, gint16 value)
{
GstBuffer *buf = gst_buffer_new_and_alloc (n_frames * sizeof (gint16));
- gint16 *data = (gint16 *) GST_BUFFER_DATA (buf);
+ gint16 *data;
+ GstMapInfo map;
GstCaps *caps;
gint i;
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = (gint16 *) map.data;
for (i = n_frames; i--;) {
*accumulator += 1;
*accumulator %= 96;
else
*data++ = -MAX (value, -32767);
}
+ gst_buffer_unmap (buf, &map);
- caps = gst_caps_new_simple ("audio/x-raw-int",
- "rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 1,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, "signed", G_TYPE_BOOLEAN, TRUE,
- "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, depth, NULL);
- gst_buffer_set_caps (buf, caps);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (S16),
+ "layout", G_TYPE_STRING, "interleaved",
+ "rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 1, NULL);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (buf, "buf", 1);
gint depth, gsize n_frames, gint16 value_l, gint16 value_r)
{
GstBuffer *buf = gst_buffer_new_and_alloc (n_frames * sizeof (gint16) * 2);
- gint16 *data = (gint16 *) GST_BUFFER_DATA (buf);
+ gint16 *data;
+ GstMapInfo map;
GstCaps *caps;
gint i;
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = (gint16 *) map.data;
for (i = n_frames; i--;) {
*accumulator += 1;
*accumulator %= 96;
*data++ = -MAX (value_r, -32767);
}
}
+ gst_buffer_unmap (buf, &map);
- caps = gst_caps_new_simple ("audio/x-raw-int",
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_AUDIO_NE (S16),
+ "layout", G_TYPE_STRING, "interleaved",
"rate", G_TYPE_INT, sample_rate, "channels", G_TYPE_INT, 2,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, "signed", G_TYPE_BOOLEAN, TRUE,
- "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, depth, NULL);
- gst_buffer_set_caps (buf, caps);
+ "channel-mask", GST_TYPE_BITMASK, (gint64) 0x3, NULL);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (buf, "buf", 1);
g_object_set (element, "forced", FALSE, NULL);
set_playing_state (element);
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
/* Provided values are totally arbitrary. */
gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND,
GST_TAG_TRACK_PEAK, 1.0, GST_TAG_TRACK_GAIN, 2.21, NULL);
g_object_set (element, "forced", FALSE, NULL);
set_playing_state (element);
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND, GST_TAG_TRACK_GAIN, 2.21,
NULL);
send_tag_event (element, tag_list);
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND, GST_TAG_TRACK_PEAK, 1.0,
NULL);
send_tag_event (element, tag_list);
for (i = 20; i--;)
push_buffer (test_buffer_const_float_stereo (8000, 512, 0.5, 0.5));
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND,
GST_TAG_TRACK_PEAK, 1.0, GST_TAG_TRACK_GAIN, 2.21, NULL);
send_tag_event (element, tag_list);
g_object_set (element, "forced", FALSE, NULL);
set_playing_state (element);
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
/* Provided values are totally arbitrary. */
gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND,
GST_TAG_TRACK_PEAK, 1.0, GST_TAG_TRACK_GAIN, 2.21, NULL);
g_object_set (element, "forced", FALSE, "num-tracks", 2, NULL);
set_playing_state (element);
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
/* Provided values are totally arbitrary. */
gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND,
GST_TAG_TRACK_PEAK, 0.75, GST_TAG_TRACK_GAIN, 2.21,
/* The second track has indeed full tags, but although being not forced, this
* one has to be processed because album processing is on. */
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
/* Provided values are totally arbitrary. */
gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND,
GST_TAG_TRACK_PEAK, 0.75, GST_TAG_TRACK_GAIN, 2.21,
g_object_set (element, "num-tracks", 0, NULL);
/* Processing a track that has to be skipped. */
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
/* Provided values are totally arbitrary. */
gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND,
GST_TAG_TRACK_PEAK, 0.75, GST_TAG_TRACK_GAIN, 2.21,
*/
#include <gst/check/gstcheck.h>
+#include <gst/audio/audio.h>
#include <math.h>
* get_peer, and then remove references in every test function */
static GstPad *mysrcpad, *mysinkpad;
-#define RG_LIMITER_CAPS_TEMPLATE_STRING \
- "audio/x-raw-float, " \
- "width = (int) 32, " \
- "endianness = (int) BYTE_ORDER, " \
- "channels = (int) [ 1, MAX ], " \
+#define RG_LIMITER_CAPS_TEMPLATE_STRING \
+ "audio/x-raw, " \
+ "format = (string) "GST_AUDIO_NE (F32) ", " \
+ "layout = (string) interleaved, " \
+ "channels = (int) [ 1, MAX ], " \
"rate = (int) [ 1, MAX ]"
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_DEBUG ("setup_rglimiter");
element = gst_check_setup_element ("rglimiter");
- mysrcpad = gst_check_setup_src_pad (element, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (element, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (element, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (element, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
GstBuffer *buf = gst_buffer_new_and_alloc (sizeof (test_input));
GstCaps *caps;
- memcpy (GST_BUFFER_DATA (buf), test_input, sizeof (test_input));
+ gst_buffer_fill (buf, 0, test_input, sizeof (test_input));
- caps = gst_caps_new_simple ("audio/x-raw-float",
+ caps = gst_caps_new_simple ("audio/x-raw",
"rate", G_TYPE_INT, 44100, "channels", G_TYPE_INT, 1,
- "endianness", G_TYPE_INT, G_BYTE_ORDER, "width", G_TYPE_INT, 32, NULL);
- gst_buffer_set_caps (buf, caps);
+ "format", G_TYPE_STRING, GST_AUDIO_NE (F32),
+ "layout", G_TYPE_STRING, "interleaved", NULL);
+ gst_pad_set_caps (mysrcpad, caps);
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (buf, "buf", 1);
static void
verify_test_buffer (GstBuffer * buf)
{
- gfloat *output = (gfloat *) GST_BUFFER_DATA (buf);
+ GstMapInfo map;
+ gfloat *output;
gint i;
- fail_unless (GST_BUFFER_SIZE (buf) == sizeof (test_output));
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ output = (gfloat *) map.data;
+ fail_unless (map.size == sizeof (test_output));
+
for (i = 0; i < G_N_ELEMENTS (test_input); i++)
fail_unless (ABS (output[i] - test_output[i]) < 1.e-6,
"Incorrect output value %.6f for input %.2f, expected %.6f",
output[i], test_input[i], test_output[i]);
+
+ gst_buffer_unmap (buf, &map);
}
/* Start of tests. */
{
GstElement *element = setup_rglimiter ();
GstBuffer *buf, *out_buf;
+ GstMapInfo m1, m2;
set_playing_state (element);
/* Verify that the baseclass does not lift the GAP flag: */
fail_unless (GST_BUFFER_FLAG_IS_SET (out_buf, GST_BUFFER_FLAG_GAP));
- g_assert (GST_BUFFER_SIZE (out_buf) == GST_BUFFER_SIZE (buf));
+ gst_buffer_map (out_buf, &m1, GST_MAP_READ);
+ gst_buffer_map (buf, &m2, GST_MAP_READ);
+
+ g_assert (m1.size == m2.size);
/* We cheated by passing an input buffer with non-silence that has the GAP
* flag set. The element cannot know that however and must have skipped
* adjusting the buffer because of the flag, which we can easily verify: */
- fail_if (memcmp (GST_BUFFER_DATA (out_buf),
- GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (out_buf)) != 0);
+ fail_if (memcmp (m1.data, m2.data, m1.size) != 0);
+
+ gst_buffer_unmap (out_buf, &m1);
+ gst_buffer_unmap (buf, &m2);
cleanup_rglimiter (element);
}
*/
#include <gst/check/gstcheck.h>
+#include <gst/audio/audio.h>
#include <math.h>
* then remove references in every test function */
static GstPad *mysrcpad, *mysinkpad;
-#define RG_VOLUME_CAPS_TEMPLATE_STRING \
- "audio/x-raw-float, " \
- "width = (int) 32, " \
- "endianness = (int) BYTE_ORDER, " \
- "channels = (int) [ 1, MAX ], " \
+#define RG_VOLUME_CAPS_TEMPLATE_STRING \
+ "audio/x-raw, " \
+ "format = (string) "GST_AUDIO_NE (F32) ", " \
+ "layout = (string) interleaved, " \
+ "channels = (int) [ 1, MAX ], " \
"rate = (int) [ 1, MAX ]"
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
/* gstcheck sets up a chain function that appends buffers to a global list.
* This is our equivalent of that for event handling. */
static gboolean
-event_func (GstPad * pad, GstEvent * event)
+event_func (GstPad * pad, GstObject * parent, GstEvent * event)
{
+ GST_DEBUG ("received event %p", event);
events = g_list_append (events, event);
return TRUE;
setup_rgvolume (void)
{
GstElement *element;
+ GstCaps *caps;
GST_DEBUG ("setup_rgvolume");
element = gst_check_setup_element ("rgvolume");
- mysrcpad = gst_check_setup_src_pad (element, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (element, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (element, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (element, &sinktemplate);
/* Capture events, to test tag filtering behavior: */
gst_pad_set_event_function (mysinkpad, event_func);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
+ caps = gst_caps_from_string ("audio/x-raw, format = F32LE, "
+ "layout = interleaved, rate = 8000, channels = 1");
+ gst_pad_set_caps (mysrcpad, caps);
+ gst_caps_unref (caps);
+
return element;
}
{
GstBuffer *buf;
GstEvent *ev;
+ GstSegment segment;
fail_unless (g_list_length (events) == 0);
- ev = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0);
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ ev = gst_event_new_segment (&segment);
fail_unless (gst_pad_push_event (mysrcpad, ev),
"Pushing newsegment event failed");
+ /* makes caps event */
buf = test_buffer_new (0.0);
- GST_BUFFER_SIZE (buf) = 0;
+ gst_buffer_resize (buf, 0, 0);
GST_BUFFER_DURATION (buf) = 0;
GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf);
fail_unless (gst_pad_push (mysrcpad, buf) == GST_FLOW_OK);
- fail_unless (g_list_length (events) == 1);
+ fail_unless (g_list_length (events) == 2);
fail_unless (events->data == ev);
gst_mini_object_unref ((GstMiniObject *) events->data);
events = g_list_remove (events, ev);
+ ev = events->data;
+ gst_mini_object_unref ((GstMiniObject *) ev);
+ events = g_list_remove (events, ev);
+ fail_unless (g_list_length (events) == 0);
fail_unless (g_list_length (buffers) == 1);
fail_unless (buffers->data == buf);
{
GstEvent *event = gst_event_new_eos ();
+ GST_DEBUG ("events : %d", g_list_length (events));
+
fail_unless (g_list_length (events) == 0);
fail_unless (gst_pad_push_event (mysrcpad, event),
"Pushing EOS event failed");
test_buffer_new (gfloat value)
{
GstBuffer *buf;
- GstCaps *caps;
+ GstMapInfo map;
gfloat *data;
gint i;
buf = gst_buffer_new_and_alloc (8 * sizeof (gfloat));
- data = (gfloat *) GST_BUFFER_DATA (buf);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = (gfloat *) map.data;
for (i = 0; i < 8; i++)
data[i] = value;
+ gst_buffer_unmap (buf, &map);
- caps = gst_caps_from_string ("audio/x-raw-float, "
- "rate = 8000, channels = 1, endianness = BYTE_ORDER, width = 32");
- gst_buffer_set_caps (buf, caps);
- gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (buf, "buf", 1);
fail_unless_result_gain (GstElement * element, gdouble expected_gain)
{
GstBuffer *input_buf, *output_buf;
+ gfloat *data;
gfloat input_sample, output_sample;
gdouble gain, prop_gain;
gboolean is_passthrough, expect_passthrough;
gint i;
+ GstMapInfo map;
fail_unless (g_list_length (buffers) == 0);
buffers = g_list_remove (buffers, output_buf);
ASSERT_BUFFER_REFCOUNT (output_buf, "output_buf", 1);
- fail_unless_equals_int (GST_BUFFER_SIZE (output_buf), 8 * sizeof (gfloat));
- output_sample = *((gfloat *) GST_BUFFER_DATA (output_buf));
+ fail_unless_equals_int (gst_buffer_get_size (output_buf),
+ 8 * sizeof (gfloat));
+
+ gst_buffer_map (output_buf, &map, GST_MAP_READ);
+ data = (gfloat *) map.data;
+ output_sample = *data;
fail_if (output_sample == 0.0, "First output sample is zero");
for (i = 1; i < 8; i++) {
- gfloat output = ((gfloat *) GST_BUFFER_DATA (output_buf))[i];
-
- fail_unless (output_sample == output, "Output samples not uniform");
+ fail_unless (output_sample == data[i], "Output samples not uniform");
};
+ gst_buffer_unmap (output_buf, &map);
gain = 20. * log10 (output_sample / input_sample);
fail_unless (MATCH_GAIN (gain, expected_gain),
send_newsegment_and_empty_buffer ();
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_TRACK_GAIN, +4.95, GST_TAG_TRACK_PEAK, 0.59463,
GST_TAG_ALBUM_GAIN, -1.54, GST_TAG_ALBUM_PEAK, 0.693415,
GST_TAG_ARTIST, "Foobar", NULL);
event = gst_event_new_tag (tag_list);
new_event = send_tag_event (element, event);
- /* Expect the element to modify the writable event. */
- fail_unless (event == new_event, "Writable tag event not reused");
gst_event_parse_tag (new_event, &tag_list);
fail_unless (gst_tag_list_get_string (tag_list, GST_TAG_ARTIST, &artist));
fail_unless (g_str_equal (artist, "Foobar"));
/* Same as above, but with a non-writable event. */
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_TRACK_GAIN, +4.95, GST_TAG_TRACK_PEAK, 0.59463,
GST_TAG_ALBUM_GAIN, -1.54, GST_TAG_ALBUM_PEAK, 0.693415,
GST_TAG_ARTIST, "Foobar", NULL);
event = gst_event_new_tag (tag_list);
- /* Holding an extra ref makes the event unwritable: */
- gst_event_ref (event);
new_event = send_tag_event (element, event);
- fail_unless (event != new_event, "Unwritable tag event reused");
gst_event_parse_tag (new_event, &tag_list);
fail_unless (gst_tag_list_get_string (tag_list, GST_TAG_ARTIST, &artist));
fail_unless (g_str_equal (artist, "Foobar"));
g_free (artist);
- gst_event_unref (event);
gst_event_unref (new_event);
cleanup_rgvolume (element);
send_newsegment_and_empty_buffer ();
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_TRACK_GAIN, -3.45, GST_TAG_TRACK_PEAK, 1.0,
GST_TAG_ALBUM_GAIN, +2.09, GST_TAG_ALBUM_PEAK, 1.0, NULL);
g_object_set (element, "album-mode", TRUE, NULL);
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_TRACK_GAIN, -3.45, GST_TAG_TRACK_PEAK, 1.0,
GST_TAG_ALBUM_GAIN, +2.09, GST_TAG_ALBUM_PEAK, 1.0, NULL);
send_newsegment_and_empty_buffer ();
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_TRACK_GAIN, +3.5, GST_TAG_TRACK_PEAK, 1.0,
GST_TAG_ALBUM_GAIN, -0.5, GST_TAG_ALBUM_PEAK, 1.0, NULL);
send_newsegment_and_empty_buffer ();
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_TRACK_GAIN, +2.11, GST_TAG_TRACK_PEAK, 1.0, NULL);
fail_unless (send_tag_event (element, gst_event_new_tag (tag_list)) == NULL);
send_newsegment_and_empty_buffer ();
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_ALBUM_GAIN, +3.73, GST_TAG_ALBUM_PEAK, 1.0, NULL);
fail_unless (send_tag_event (element, gst_event_new_tag (tag_list)) == NULL);
send_newsegment_and_empty_buffer ();
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_TRACK_GAIN, +3.50, GST_TAG_TRACK_PEAK, 1.0, NULL);
fail_unless (send_tag_event (element, gst_event_new_tag (tag_list)) == NULL);
send_eos_event (element);
g_object_set (element, "headroom", +2.00, NULL);
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_TRACK_GAIN, +9.18, GST_TAG_TRACK_PEAK, 0.687149, NULL);
fail_unless (send_tag_event (element, gst_event_new_tag (tag_list)) == NULL);
send_eos_event (element);
g_object_set (element, "album-mode", TRUE, NULL);
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_ALBUM_GAIN, +5.50, GST_TAG_ALBUM_PEAK, 1.0, NULL);
fail_unless (send_tag_event (element, gst_event_new_tag (tag_list)) == NULL);
send_newsegment_and_empty_buffer ();
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_TRACK_GAIN, 0.00, GST_TAG_TRACK_PEAK, 0.2,
GST_TAG_REFERENCE_LEVEL, 83., NULL);
/* Same as above, but with album gain. */
- tag_list = gst_tag_list_new ();
+ tag_list = gst_tag_list_new_empty ();
gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE,
GST_TAG_TRACK_GAIN, 1.23, GST_TAG_TRACK_PEAK, 0.1,
GST_TAG_ALBUM_GAIN, 0.00, GST_TAG_ALBUM_PEAK, 0.2,
* Chain list function for testing buffer lists
*/
static GstFlowReturn
-rtp_pipeline_chain_list (GstPad * pad, GstBufferList * list)
+rtp_pipeline_chain_list (GstPad * pad, GstObject * parent, GstBufferList * list)
{
- GstBufferListIterator *it;
+ guint i, len;
fail_if (!list);
- it = gst_buffer_list_iterate (list);
-
/*
* Count the size of the payload in the buffer list.
*/
+ len = gst_buffer_list_length (list);
/* Loop through all groups */
- while (gst_buffer_list_iterator_next_group (it)) {
+ for (i = 0; i < len; i++) {
GstBuffer *paybuf;
- /* Skip the first buffer in the group, its the RTP header */
- fail_if (!gst_buffer_list_iterator_next (it));
-
+ /* FIXME need to discard RTP header */
+ paybuf = gst_buffer_list_get (list, i);
/* Loop through all payload buffers in the current group */
- while ((paybuf = gst_buffer_list_iterator_next (it))) {
- chain_list_bytes_received += GST_BUFFER_SIZE (paybuf);
- }
+ chain_list_bytes_received += gst_buffer_get_size (paybuf);
}
-
- gst_buffer_list_iterator_free (it);
gst_buffer_list_unref (list);
return GST_FLOW_OK;
for (j = 0; j < p->frame_count; j++) {
GstBuffer *buf;
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = (guint8 *) data;
- GST_BUFFER_SIZE (buf) = p->frame_data_size;
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_READONLY);
+ buf =
+ gst_buffer_new_wrapped_full ((guint8 *) data, NULL, 0,
+ p->frame_data_size);
g_signal_emit_by_name (p->appsrc, "push-buffer", buf, &flow_ret);
fail_unless_equals_int (flow_ret, GST_FLOW_OK);
{
rtp_pipeline_test (rtp_L16_frame_data, rtp_L16_frame_data_size,
rtp_L16_frame_count,
- "audio/x-raw-int,endianess=4321,signed=true,width=16,depth=16,rate=1,channels=1",
+ "audio/x-raw,format=S16_BE,rate=1,channels=1",
"rtpL16pay", "rtpL16depay", 0, 0, FALSE);
}
GObject *session;
gint count = 2;
- rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
+ rtpbin = gst_element_factory_make ("rtpbin", "rtpbin");
while (count--) {
/* request session 0 */
0x2b, 0x82, 0x31, 0x3b, 0x36, 0xc1, 0x3c, 0x13
};
-static GstBuffer *
-make_rtp_packet (CleanupData * data)
+static GstFlowReturn
+chain_rtp_packet (GstPad * pad, CleanupData * data)
{
+ GstFlowReturn res;
static GstCaps *caps = NULL;
- GstBuffer *result;
- guint8 *datap;
+ GstBuffer *buffer;
+ GstMapInfo map;
if (caps == NULL) {
caps = gst_caps_from_string ("application/x-rtp,"
"media=(string)audio, clock-rate=(int)44100, "
"encoding-name=(string)L16, encoding-params=(string)1, channels=(int)1");
data->seqnum = 0;
+ gst_pad_set_caps (pad, caps);
}
- result = gst_buffer_new_and_alloc (sizeof (rtp_packet));
- datap = GST_BUFFER_DATA (result);
- memcpy (datap, rtp_packet, sizeof (rtp_packet));
+ buffer = gst_buffer_new_and_alloc (sizeof (rtp_packet));
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ memcpy (map.data, rtp_packet, sizeof (rtp_packet));
- datap[2] = (data->seqnum >> 8) & 0xff;
- datap[3] = data->seqnum & 0xff;
+ map.data[2] = (data->seqnum >> 8) & 0xff;
+ map.data[3] = data->seqnum & 0xff;
data->seqnum++;
+ gst_buffer_unmap (buffer, &map);
- gst_buffer_set_caps (result, caps);
+ res = gst_pad_chain (pad, buffer);
- return result;
+ return res;
}
static GstFlowReturn
-dummy_chain (GstPad * pad, GstBuffer * buffer)
+dummy_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
gst_buffer_unref (buffer);
CleanupData data;
GstStateChangeReturn ret;
GstFlowReturn res;
- GstBuffer *buffer;
gint count = 2;
init_data (&data);
- rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
+ rtpbin = gst_element_factory_make ("rtpbin", "rtpbin");
g_signal_connect (rtpbin, "pad-added", (GCallback) pad_added_cb, &data);
g_signal_connect (rtpbin, "pad-removed", (GCallback) pad_removed_cb, &data);
fail_unless (rtpbin->numsinkpads == 1);
fail_unless (rtpbin->numsrcpads == 0);
- buffer = make_rtp_packet (&data);
- res = gst_pad_chain (rtp_sink, buffer);
+ res = chain_rtp_packet (rtp_sink, &data);
GST_DEBUG ("res %d, %s\n", res, gst_flow_get_name (res));
fail_unless (res == GST_FLOW_OK);
- buffer = make_rtp_packet (&data);
- res = gst_pad_chain (rtp_sink, buffer);
+ res = chain_rtp_packet (rtp_sink, &data);
GST_DEBUG ("res %d, %s\n", res, gst_flow_get_name (res));
fail_unless (res == GST_FLOW_OK);
CleanupData data;
GstStateChangeReturn ret;
GstFlowReturn res;
- GstBuffer *buffer;
gint count = 2;
init_data (&data);
- rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
+ rtpbin = gst_element_factory_make ("rtpbin", "rtpbin");
g_signal_connect (rtpbin, "pad-added", (GCallback) pad_added_cb, &data);
g_signal_connect (rtpbin, "pad-removed", (GCallback) pad_removed_cb, &data);
fail_unless (rtpbin->numsinkpads == 1);
fail_unless (rtpbin->numsrcpads == 0);
- buffer = make_rtp_packet (&data);
- res = gst_pad_chain (rtp_sink, buffer);
+ res = chain_rtp_packet (rtp_sink, &data);
GST_DEBUG ("res %d, %s\n", res, gst_flow_get_name (res));
fail_unless (res == GST_FLOW_OK);
- buffer = make_rtp_packet (&data);
- res = gst_pad_chain (rtp_sink, buffer);
+ res = chain_rtp_packet (rtp_sink, &data);
GST_DEBUG ("res %d, %s\n", res, gst_flow_get_name (res));
fail_unless (res == GST_FLOW_OK);
GstElement *rtpbin;
GstPad *rtp_sink1, *rtp_sink2, *rtp_sink3;
- rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
- rtp_sink1 = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_%d");
+ rtpbin = gst_element_factory_make ("rtpbin", "rtpbin");
+ rtp_sink1 = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_%u");
fail_unless (rtp_sink1 != NULL);
fail_unless_equals_string (GST_PAD_NAME (rtp_sink1), "recv_rtp_sink_0");
ASSERT_OBJECT_REFCOUNT (rtp_sink1, "rtp_sink1", 2);
- rtp_sink2 = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_%d");
+ rtp_sink2 = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_%u");
fail_unless (rtp_sink2 != NULL);
fail_unless_equals_string (GST_PAD_NAME (rtp_sink2), "recv_rtp_sink_1");
ASSERT_OBJECT_REFCOUNT (rtp_sink2, "rtp_sink2", 2);
- rtp_sink3 = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_%d");
+ rtp_sink3 = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_%u");
fail_unless (rtp_sink3 != NULL);
fail_unless_equals_string (GST_PAD_NAME (rtp_sink3), "recv_rtp_sink_2");
ASSERT_OBJECT_REFCOUNT (rtp_sink3, "rtp_sink3", 2);
static Suite *
gstrtpbin_suite (void)
{
- Suite *s = suite_create ("gstrtpbin");
+ Suite *s = suite_create ("rtpbin");
TCase *tc_chain = tcase_create ("general");
suite_add_tcase (s, tc_chain);
#include <gst/rtp/gstrtpbuffer.h>
+#if 0
/* This test makes sure that RTP packets sent as buffer lists are sent through
* the rtpbin as they are supposed to, and not corrupted in any way.
GST_END_TEST;
+#endif
static Suite *
tcase_set_timeout (tc_chain, 10);
suite_add_tcase (s, tc_chain);
+#if 0
tcase_add_test (tc_chain, test_bufferlist);
+#endif
return s;
}
);
static void
-buffer_dropped (gpointer mem)
+buffer_dropped (gpointer data, GstMiniObject * obj)
{
- if (mem) {
- GST_DEBUG ("dropping buffer: data=%p", mem);
- g_free (mem);
- num_dropped++;
- }
+ GST_DEBUG ("dropping buffer %p", obj);
+ num_dropped++;
}
static GstElement *
GstBuffer *buffer;
GstCaps *caps;
/* a 20 sample audio block (2,5 ms) generated with
- * gst-launch audiotestsrc wave=silence blocksize=40 num-buffers=3 !
- * "audio/x-raw-int,channels=1,rate=8000" ! mulawenc ! rtppcmupay !
+ * gst-launch audiotestsrc wave=silence blocksize=40 num-buffers=3 !
+ * "audio/x-raw,channels=1,rate=8000" ! mulawenc ! rtppcmupay !
* fakesink dump=1
*/
guint8 in[] = { /* first 4 bytes are rtp-header, next 4 bytes are timestamp */
gint i;
GST_DEBUG ("setup_jitterbuffer");
- jitterbuffer = gst_check_setup_element ("gstrtpjitterbuffer");
+ jitterbuffer = gst_check_setup_element ("rtpjitterbuffer");
/* we need a clock here */
clock = gst_system_clock_obtain ();
gst_element_set_clock (jitterbuffer, clock);
GST_INFO_OBJECT (jitterbuffer, "set latency to %u ms", latency);
*/
- mysrcpad = gst_check_setup_src_pad (jitterbuffer, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (jitterbuffer, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (jitterbuffer, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (jitterbuffer, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
/* create n buffers */
caps = gst_caps_from_string (RTP_CAPS_STRING);
+ gst_pad_set_caps (mysrcpad, caps);
+ gst_caps_unref (caps);
+
for (i = 0; i < num_buffers; i++) {
buffer = gst_buffer_new_and_alloc (sizeof (in));
- memcpy (GST_BUFFER_DATA (buffer), in, sizeof (in));
- gst_buffer_set_caps (buffer, caps);
+ gst_buffer_fill (buffer, 0, in, sizeof (in));
GST_BUFFER_TIMESTAMP (buffer) = ts;
GST_BUFFER_DURATION (buffer) = tso;
- GST_BUFFER_FREE_FUNC (buffer) = buffer_dropped;
- GST_DEBUG ("created buffer: %p, data=%p", buffer, GST_BUFFER_DATA (buffer));
+ gst_mini_object_weak_ref (GST_MINI_OBJECT (buffer), buffer_dropped, NULL);
+ GST_DEBUG ("created buffer: %p", buffer);
if (!i)
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
in[7] += RTP_FRAME_SIZE; /* inc. timestamp with framesize */
ts += tso;
}
- gst_caps_unref (caps);
num_dropped = 0;
return jitterbuffer;
GList *node;
GstClockTime ts = G_GUINT64_CONSTANT (0);
GstClockTime tso = gst_util_uint64_scale (RTP_FRAME_SIZE, GST_SECOND, 8000);
- guint8 *data;
+ GstMapInfo map;
guint16 prev_sn = 0, cur_sn;
guint32 prev_ts = 0, cur_ts;
for (node = buffers; node; node = g_list_next (node)) {
fail_if ((buffer = (GstBuffer *) node->data) == NULL);
fail_if (GST_BUFFER_TIMESTAMP (buffer) != ts);
- data = GST_BUFFER_DATA (buffer);
- cur_sn = ((guint16) data[2] << 8) | data[3];
- cur_ts = ((guint32) data[4] << 24) | ((guint32) data[5] << 16) |
- ((guint32) data[6] << 8) | data[7];
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ cur_sn = ((guint16) map.data[2] << 8) | map.data[3];
+ cur_ts = ((guint32) map.data[4] << 24) | ((guint32) map.data[5] << 16) |
+ ((guint32) map.data[6] << 8) | map.data[7];
+ gst_buffer_unmap (buffer, &map);
if (node != buffers) {
fail_unless (cur_sn > prev_sn);
#define SHAPEWIPE_VIDEO_CAPS_STRING \
- "video/x-raw-yuv, " \
- "format = (GstFourcc)AYUV, " \
+ "video/x-raw, " \
+ "format = (string)AYUV, " \
"width = 400, " \
"height = 400, " \
"framerate = 0/1"
#define SHAPEWIPE_MASK_CAPS_STRING \
- "video/x-raw-gray, " \
- "bpp = 8, " \
- "depth = 8, " \
+ "video/x-raw, " \
+ "format = (string)GRAY8, " \
"width = 400, " \
"height = 400, " \
"framerate = 0/1"
static GstBuffer *output = NULL;
static GstFlowReturn
-on_chain (GstPad * pad, GstBuffer * buffer)
+on_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
g_return_val_if_fail (output == NULL, GST_FLOW_ERROR);
GstBuffer *mask, *input;
guint i, j;
guint8 *data;
+ GstMapInfo map;
myvideosrcpad =
gst_pad_new_from_static_template (&videosrctemplate, "videosrc");
mask = gst_buffer_new_and_alloc (400 * 400);
caps = gst_caps_from_string (SHAPEWIPE_MASK_CAPS_STRING);
- gst_buffer_set_caps (mask, caps);
+ gst_pad_set_caps (mymasksrcpad, caps);
gst_caps_unref (caps);
- data = GST_BUFFER_DATA (mask);
+ gst_buffer_map (mask, &map, GST_MAP_WRITE);
+ data = map.data;
for (i = 0; i < 400; i++) {
for (j = 0; j < 400; j++) {
if (i < 100 && j < 100)
data++;
}
}
+ gst_buffer_unmap (mask, &map);
fail_unless (gst_pad_push (mymasksrcpad, mask) == GST_FLOW_OK);
input = gst_buffer_new_and_alloc (400 * 400 * 4);
caps = gst_caps_from_string (SHAPEWIPE_VIDEO_CAPS_STRING);
- gst_buffer_set_caps (input, caps);
+ gst_pad_set_caps (myvideosrcpad, caps);
gst_caps_unref (caps);
- data = GST_BUFFER_DATA (input);
+ gst_buffer_map (input, &map, GST_MAP_WRITE);
+ data = map.data;
for (i = 0; i < 400; i++) {
for (j = 0; j < 400; j++) {
/* This is green */
data += 4;
}
}
+ gst_buffer_unmap (input, &map);
g_object_set (G_OBJECT (shapewipe), "position", 0.0, NULL);
output = NULL;
fail_unless (gst_pad_push (myvideosrcpad,
gst_buffer_ref (input)) == GST_FLOW_OK);
fail_unless (output != NULL);
- data = GST_BUFFER_DATA (output);
+ gst_buffer_map (output, &map, GST_MAP_WRITE);
+ data = map.data;
for (i = 0; i < 400; i++) {
for (j = 0; j < 400; j++) {
fail_unless_equals_int (data[0], 255); /* A */
data += 4;
}
}
+ gst_buffer_unmap (output, &map);
gst_buffer_unref (output);
output = NULL;
fail_unless (gst_pad_push (myvideosrcpad,
gst_buffer_ref (input)) == GST_FLOW_OK);
fail_unless (output != NULL);
- data = GST_BUFFER_DATA (output);
+ gst_buffer_map (output, &map, GST_MAP_READ);
+ data = map.data;
for (i = 0; i < 400; i++) {
for (j = 0; j < 400; j++) {
if (i < 100 && j < 100) {
data += 4;
}
}
+ gst_buffer_unmap (output, &map);
gst_buffer_unref (output);
output = NULL;
fail_unless (gst_pad_push (myvideosrcpad,
gst_buffer_ref (input)) == GST_FLOW_OK);
fail_unless (output != NULL);
- data = GST_BUFFER_DATA (output);
+ gst_buffer_map (output, &map, GST_MAP_READ);
+ data = map.data;
for (i = 0; i < 400; i++) {
for (j = 0; j < 400; j++) {
if (i < 200 && j < 200) {
data += 4;
}
}
+ gst_buffer_unmap (output, &map);
gst_buffer_unref (output);
output = NULL;
fail_unless (gst_pad_push (myvideosrcpad,
gst_buffer_ref (input)) == GST_FLOW_OK);
fail_unless (output != NULL);
- data = GST_BUFFER_DATA (output);
+ gst_buffer_map (output, &map, GST_MAP_READ);
+ data = map.data;
for (i = 0; i < 400; i++) {
for (j = 0; j < 400; j++) {
if (i < 300 && j < 300) {
data += 4;
}
}
+ gst_buffer_unmap (output, &map);
gst_buffer_unref (output);
output = NULL;
fail_unless (gst_pad_push (myvideosrcpad,
gst_buffer_ref (input)) == GST_FLOW_OK);
fail_unless (output != NULL);
- data = GST_BUFFER_DATA (output);
+ gst_buffer_map (output, &map, GST_MAP_READ);
+ data = map.data;
for (i = 0; i < 400; i++) {
for (j = 0; j < 400; j++) {
fail_unless_equals_int (data[0], 0); /* A */
data += 4;
}
}
+ gst_buffer_unmap (output, &map);
gst_buffer_unref (output);
output = NULL;
gpointer user_data)
{
GstStructure *s;
+ GstCaps *caps;
/* Caps can be anything if we don't except icy caps */
if (!icy_caps)
return;
/* Otherwise they _must_ be "application/x-icy" */
- fail_unless (GST_BUFFER_CAPS (buf) != NULL);
- s = gst_caps_get_structure (GST_BUFFER_CAPS (buf), 0);
+ caps = gst_pad_get_current_caps (pad);
+ fail_unless (caps != NULL);
+ s = gst_caps_get_structure (caps, 0);
fail_unless_equals_string (gst_structure_get_name (s), "application/x-icy");
+ gst_caps_unref (caps);
}
GST_START_TEST (test_icy_stream)
src = gst_element_factory_make ("souphttpsrc", NULL);
fail_unless (src != NULL);
- g_object_set (src, "iradio-mode", TRUE, NULL);
sink = gst_element_factory_make ("fakesink", NULL);
fail_unless (sink != NULL);
g_type_init ();
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- if (!g_thread_supported ())
- g_thread_init (NULL);
-#endif
-
s = suite_create ("souphttpsrc");
tc_chain = tcase_create ("general");
tc_internet = tcase_create ("internet");
#include <unistd.h>
+#include <gst/audio/audio.h>
#include <gst/check/gstcheck.h>
gboolean have_eos = FALSE;
GstPad *mysrcpad, *mysinkpad;
#define SPECT_CAPS_TEMPLATE_STRING \
- "audio/x-raw-int, " \
- " width = (int) 16, " \
- " depth = (int) 16, " \
- " signed = (boolean) true, " \
- " endianness = (int) BYTE_ORDER, " \
+ "audio/x-raw, " \
" rate = (int) [ 1, MAX ], " \
- " channels = (int) [ 1, MAX ]; " \
- "audio/x-raw-int, " \
- " width = (int) 32, " \
- " depth = (int) 32, " \
- " signed = (boolean) true, " \
- " endianness = (int) BYTE_ORDER, " \
- " rate = (int) [ 1, MAX ], " \
- " channels = (int) [ 1, MAX ]; " \
- "audio/x-raw-float, " \
- " width = (int) { 32, 64 }, " \
- " endianness = (int) BYTE_ORDER, " \
- " rate = (int) [ 1, MAX ], " \
- " channels = (int) [ 1, MAX ]"
+ " channels = (int) [ 1, MAX ], " \
+ " layout = (string) interleaved, " \
+ " format = (string) { " \
+ GST_AUDIO_NE(S16) ", " \
+ GST_AUDIO_NE(S32) ", " \
+ GST_AUDIO_NE(F32) ", " \
+ GST_AUDIO_NE(F64) " }"
#define SPECT_CAPS_STRING_S16 \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
"rate = (int) 44100, " \
"channels = (int) 1, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 16, " \
- "depth = (int) 16, " \
- "signed = (boolean) true"
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(S16)
#define SPECT_CAPS_STRING_S32 \
- "audio/x-raw-int, " \
+ "audio/x-raw, " \
"rate = (int) 44100, " \
"channels = (int) 1, " \
- "endianness = (int) BYTE_ORDER, " \
- "width = (int) 32, " \
- "depth = (int) 32, " \
- "signed = (boolean) true"
+ "layout = (string) interleaved, " \
+ "format = (string) " GST_AUDIO_NE(S32)
#define SPECT_CAPS_STRING_F32 \
- "audio/x-raw-float, " \
- " width = (int) 32, " \
- " endianness = (int) BYTE_ORDER, " \
+ "audio/x-raw, " \
" rate = (int) 44100, " \
- " channels = (int) 1"
+ " channels = (int) 1, " \
+ " layout = (string) interleaved, " \
+ " format = (string) " GST_AUDIO_NE(F32)
#define SPECT_CAPS_STRING_F64 \
- "audio/x-raw-float, " \
- " width = (int) 64, " \
- " endianness = (int) BYTE_ORDER, " \
+ "audio/x-raw, " \
" rate = (int) 44100, " \
- " channels = (int) 1"
+ " channels = (int) 1, " \
+ " layout = (string) interleaved, " \
+ " format = (string) " GST_AUDIO_NE(F64)
#define SPECT_BANDS 256
GST_DEBUG ("setup_spectrum");
spectrum = gst_check_setup_element ("spectrum");
- mysrcpad = gst_check_setup_src_pad (spectrum, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (spectrum, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (spectrum, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (spectrum, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
const GstStructure *structure;
int i, j;
gint16 *data;
+ GstMapInfo map;
const GValue *list, *value;
GstClockTime endtime;
gfloat level;
spectrum = setup_spectrum ();
- g_object_set (spectrum, "message", TRUE, "interval", GST_SECOND / 100,
+ g_object_set (spectrum, "post-messages", TRUE, "interval", GST_SECOND / 100,
"bands", SPECT_BANDS, "threshold", -80, NULL);
fail_unless (gst_element_set_state (spectrum,
"could not set to playing");
/* create a 1 sec buffer with an 11025 Hz sine wave */
- inbuffer = gst_buffer_new_and_alloc (44100 * sizeof (gint16));
- data = (gint16 *) GST_BUFFER_DATA (inbuffer);
-
+ inbuffer = gst_buffer_new_allocate (NULL, 44100 * sizeof (gint16), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ data = (gint16 *) map.data;
for (j = 0; j < 44100; j += 4) {
*data = 0;
++data;
*data = -32767;
++data;
}
+ gst_buffer_unmap (inbuffer, &map);
caps = gst_caps_from_string (SPECT_CAPS_STRING_S16);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
const GstStructure *structure;
int i, j;
gint32 *data;
+ GstMapInfo map;
const GValue *list, *value;
GstClockTime endtime;
gfloat level;
spectrum = setup_spectrum ();
- g_object_set (spectrum, "message", TRUE, "interval", GST_SECOND / 100,
+ g_object_set (spectrum, "post-messages", TRUE, "interval", GST_SECOND / 100,
"bands", SPECT_BANDS, "threshold", -80, NULL);
fail_unless (gst_element_set_state (spectrum,
"could not set to playing");
/* create a 1 sec buffer with an 11025 Hz sine wave */
- inbuffer = gst_buffer_new_and_alloc (44100 * sizeof (gint32));
- data = (gint32 *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 44100 * sizeof (gint32), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ data = (gint32 *) map.data;
for (j = 0; j < 44100; j += 4) {
*data = 0;
++data;
*data = -2147483647;
++data;
}
+ gst_buffer_unmap (inbuffer, &map);
+
caps = gst_caps_from_string (SPECT_CAPS_STRING_S32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
const GstStructure *structure;
int i, j;
gfloat *data;
+ GstMapInfo map;
const GValue *list, *value;
GstClockTime endtime;
gfloat level;
spectrum = setup_spectrum ();
- g_object_set (spectrum, "message", TRUE, "interval", GST_SECOND / 100,
+ g_object_set (spectrum, "post-messages", TRUE, "interval", GST_SECOND / 100,
"bands", SPECT_BANDS, "threshold", -80, NULL);
fail_unless (gst_element_set_state (spectrum,
"could not set to playing");
/* create a 1 sec buffer with an 11025 Hz sine wave */
- inbuffer = gst_buffer_new_and_alloc (44100 * sizeof (gfloat));
- data = (gfloat *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 44100 * sizeof (gfloat), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ data = (gfloat *) map.data;
for (j = 0; j < 44100; j += 4) {
*data = 0.0;
++data;
*data = -1.0;
++data;
}
+ gst_buffer_unmap (inbuffer, &map);
+
caps = gst_caps_from_string (SPECT_CAPS_STRING_F32);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
const GstStructure *structure;
int i, j;
gdouble *data;
+ GstMapInfo map;
const GValue *list, *value;
GstClockTime endtime;
gfloat level;
spectrum = setup_spectrum ();
- g_object_set (spectrum, "message", TRUE, "interval", GST_SECOND / 100,
+ g_object_set (spectrum, "post-messages", TRUE, "interval", GST_SECOND / 100,
"bands", SPECT_BANDS, "threshold", -80, NULL);
fail_unless (gst_element_set_state (spectrum,
"could not set to playing");
/* create a 1 sec buffer with an 11025 Hz sine wave */
- inbuffer = gst_buffer_new_and_alloc (44100 * sizeof (gdouble));
- data = (gdouble *) GST_BUFFER_DATA (inbuffer);
+ inbuffer = gst_buffer_new_allocate (NULL, 44100 * sizeof (gdouble), 0);
+ gst_buffer_map (inbuffer, &map, GST_MAP_WRITE);
+ data = (gdouble *) map.data;
for (j = 0; j < 44100; j += 4) {
*data = 0.0;
++data;
*data = -1.0;
++data;
}
+ gst_buffer_unmap (inbuffer, &map);
+
caps = gst_caps_from_string (SPECT_CAPS_STRING_F64);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
#include <stdlib.h>
#include <unistd.h>
+#if 0
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
}
GST_END_TEST;
+#endif
/*
* Creates the test suite.
tcase_set_timeout (tc_chain, 60);
suite_add_tcase (s, tc_chain);
+#if 0
tcase_add_test (tc_chain, test_udpsink);
tcase_add_test (tc_chain, test_udpsink_bufferlist);
+#endif
return s;
}
if (g_socket_send_to (socket, sa, "HeLL0", 0, NULL, NULL) == 0) {
GST_INFO ("sent 0 bytes");
if (g_socket_send_to (socket, sa, "HeLL0", 6, NULL, NULL) == 6) {
+ GstMapInfo map;
GstBuffer *buf;
guint len;
/* last buffer should be our HeLL0 string */
buf = GST_BUFFER (g_list_nth_data (buffers, len - 1));
- fail_unless_equals_int (GST_BUFFER_SIZE (buf), 6);
- fail_unless_equals_string ((gchar *) GST_BUFFER_DATA (buf), "HeLL0");
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ fail_unless_equals_int (map.size, 6);
+ fail_unless_equals_string ((gchar *) map.data, "HeLL0");
+ gst_buffer_unmap (buf, &map);
/* if there's another buffer, it should be 0 bytes */
if (len == 2) {
buf = GST_BUFFER (g_list_nth_data (buffers, 0));
- fail_unless_equals_int (GST_BUFFER_SIZE (buf), 0);
+ fail_unless_equals_int (gst_buffer_get_size (buf), 0);
}
} else {
GST_WARNING ("send_to(6 bytes) failed");
#include <unistd.h>
#include <gst/check/gstcheck.h>
+#include <gst/video/video.h>
#include <gst/base/gstbasetransform.h>
/* return a list of caps where we only need to set
static GList *
video_crop_get_test_caps (GstElement * videocrop)
{
- const GstCaps *allowed_caps;
+ GstCaps *templ, *allowed_caps;
GstPad *srcpad;
GList *list = NULL;
guint i;
srcpad = gst_element_get_static_pad (videocrop, "src");
fail_unless (srcpad != NULL);
- allowed_caps = gst_pad_get_pad_template_caps (srcpad);
- fail_unless (allowed_caps != NULL);
+ templ = gst_pad_get_pad_template_caps (srcpad);
+ fail_unless (templ != NULL);
+
+ allowed_caps = gst_caps_normalize (templ);
+ gst_caps_unref (templ);
for (i = 0; i < gst_caps_get_size (allowed_caps); ++i) {
GstStructure *new_structure;
gst_structure_remove_field (new_structure, "height");
gst_caps_append_structure (single_caps, new_structure);
+ GST_DEBUG ("have caps %" GST_PTR_FORMAT, single_caps);
/* should be fixed without width/height */
fail_unless (gst_caps_is_fixed (single_caps));
list = g_list_prepend (list, single_caps);
}
+ gst_caps_unref (allowed_caps);
gst_object_unref (srcpad);
return list;
fail_unless (videocrop != NULL, "Failed to create videocrop element");
vcrop_klass = GST_BASE_TRANSFORM_GET_CLASS (videocrop);
- csp = gst_element_factory_make ("ffmpegcolorspace", "csp");
- fail_unless (csp != NULL, "Failed to create ffmpegcolorspace element");
+ csp = gst_element_factory_make ("videoconvert", "csp");
+ fail_unless (csp != NULL, "Failed to create videoconvert element");
csp_klass = GST_BASE_TRANSFORM_GET_CLASS (csp);
caps_list = video_crop_get_test_caps (videocrop);
for (i = 0; i < G_N_ELEMENTS (sizes_to_try); ++i) {
gchar *caps_str;
- guint csp_size = 0;
- guint vc_size = 0;
+ gsize csp_size = 0;
+ gsize vc_size = 0;
gst_structure_set (s, "width", G_TYPE_INT, sizes_to_try[i].width,
"height", G_TYPE_INT, sizes_to_try[i].height, NULL);
caps_str = gst_caps_to_string (caps);
GST_INFO ("Testing unit size for %s", caps_str);
- /* skip if ffmpegcolorspace doesn't support these caps
+ /* skip if videoconvert doesn't support these caps
* (only works with gst-plugins-base 0.10.9.1 or later) */
if (!csp_klass->get_unit_size ((GstBaseTransform *) csp, caps, &csp_size)) {
- GST_INFO ("ffmpegcolorspace does not support format %s", caps_str);
+ GST_INFO ("videoconvert does not support format %s", caps_str);
g_free (caps_str);
continue;
}
caps, &vc_size));
fail_unless (vc_size == csp_size,
- "videocrop and ffmpegcolorspace return different unit sizes for "
+ "videocrop and videoconvert return different unit sizes for "
"caps %s: vc_size=%d, csp_size=%d", caps_str, vc_size, csp_size);
g_free (caps_str);
GstElement *crop;
GstElement *sink;
GstBuffer *last_buf;
+ GstCaps *last_caps;
} GstVideoCropTestContext;
static void
handoff_cb (GstElement * sink, GstBuffer * buf, GstPad * pad,
- GstBuffer ** p_buf)
+ GstVideoCropTestContext * ctx)
{
- gst_buffer_replace (p_buf, buf);
+ GstCaps *caps;
+
+ gst_buffer_replace (&ctx->last_buf, buf);
+ caps = gst_pad_get_current_caps (pad);
+ gst_caps_replace (&ctx->last_caps, caps);
+ gst_caps_unref (caps);
}
static void
g_object_set (ctx->src, "pattern", 4, NULL);
g_object_set (ctx->sink, "signal-handoffs", TRUE, NULL);
- g_signal_connect (ctx->sink, "preroll-handoff", G_CALLBACK (handoff_cb),
- &ctx->last_buf);
+ g_signal_connect (ctx->sink, "preroll-handoff", G_CALLBACK (handoff_cb), ctx);
ctx->last_buf = NULL;
+ ctx->last_caps = NULL;
GST_LOG ("context inited");
}
memset (ctx, 0x00, sizeof (GstVideoCropTestContext));
}
-typedef void (*GstVideoCropTestBufferFunc) (GstBuffer * buffer);
+typedef void (*GstVideoCropTestBufferFunc) (GstBuffer * buffer, GstCaps * caps);
static void
videocrop_test_cropping (GstVideoCropTestContext * ctx, GstCaps * in_caps,
-1) == GST_STATE_CHANGE_SUCCESS);
if (func != NULL) {
- func (ctx->last_buf);
+ func (ctx->last_buf, ctx->last_caps);
}
gst_element_set_state (ctx->pipeline, GST_STATE_NULL);
}
static void
-check_1x1_buffer (GstBuffer * buf)
+check_1x1_buffer (GstBuffer * buf, GstCaps * caps)
{
- GstStructure *s;
+ GstVideoInfo info;
+ GstVideoFrame frame;
+ /* the exact values we check for come from videotestsrc */
+ static const guint yuv_values[] = { 81, 90, 240, 255 };
+ static const guint rgb_values[] = { 0xff, 0, 0, 255 };
+ static const guint gray_values[] = { 63, 63, 63, 255 };
+ const guint *values;
+ guint i;
+ const GstVideoFormatInfo *finfo;
fail_unless (buf != NULL);
- fail_unless (GST_BUFFER_CAPS (buf) != NULL);
-
- s = gst_caps_get_structure (GST_BUFFER_CAPS (buf), 0);
- if (gst_structure_has_name (s, "video/x-raw-yuv")) {
- guint32 format = 0;
-
- fail_unless (gst_structure_get_fourcc (s, "format", &format));
-
- /* the exact values we check for come from videotestsrc */
- switch (format) {
- case GST_MAKE_FOURCC ('I', '4', '2', '0'):
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[0], 81);
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[8], 90);
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[12], 240);
- break;
- case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[0], 81);
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[8], 240);
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[12], 90);
- break;
- case GST_MAKE_FOURCC ('Y', '8', '0', '0'):
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[0], 81);
- /* no chroma planes */
- break;
- case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[1], 81);
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[2], 90);
- fail_unless_equals_int (GST_BUFFER_DATA (buf)[3], 240);
- /* no chroma planes */
- break;
- default:
- GST_LOG ("not checking %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (format));
- break;
- }
- } else if (gst_structure_has_name (s, "video/x-raw-rgb")) {
- guint32 pixel;
- gint rmask = 0, bmask = 0, gmask = 0, endianness = 0, bpp = 0;
- gint rshift, gshift, bshift;
-
- fail_unless (gst_structure_get_int (s, "red_mask", &rmask));
- fail_unless (gst_structure_get_int (s, "blue_mask", &bmask));
- fail_unless (gst_structure_get_int (s, "green_mask", &gmask));
- fail_unless (gst_structure_get_int (s, "bpp", &bpp));
- fail_unless (gst_structure_get_int (s, "endianness", &endianness));
-
- fail_unless (rmask != 0);
- fail_unless (gmask != 0);
- fail_unless (bmask != 0);
- fail_unless (bpp != 0);
- fail_unless (endianness != 0);
-
- rshift = g_bit_nth_lsf (rmask, -1);
- gshift = g_bit_nth_lsf (gmask, -1);
- bshift = g_bit_nth_lsf (bmask, -1);
-
- switch (bpp) {
- case 32:{
- if (endianness == G_LITTLE_ENDIAN)
- pixel = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
- else
- pixel = GST_READ_UINT32_BE (GST_BUFFER_DATA (buf));
- break;
- }
- case 24:{
- if (endianness == G_BIG_ENDIAN) {
- pixel = (GST_READ_UINT8 (GST_BUFFER_DATA (buf)) << 16) |
- (GST_READ_UINT8 (GST_BUFFER_DATA (buf) + 1) << 8) |
- (GST_READ_UINT8 (GST_BUFFER_DATA (buf) + 2) << 0);
- } else {
- pixel = (GST_READ_UINT8 (GST_BUFFER_DATA (buf) + 2) << 16) |
- (GST_READ_UINT8 (GST_BUFFER_DATA (buf) + 1) << 8) |
- (GST_READ_UINT8 (GST_BUFFER_DATA (buf) + 0) << 0);
- }
- break;
- }
- default:{
- GST_LOG ("not checking RGB-format buffer with %ubpp", bpp);
- return;
- }
- }
+ fail_unless (caps != NULL);
+
+ fail_unless (gst_video_info_from_caps (&info, caps));
+ fail_unless (gst_video_frame_map (&frame, &info, buf, GST_MAP_READ));
+
+ finfo = info.finfo;
+
+
+ if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_Y800)
+ values = gray_values;
+ else if (GST_VIDEO_INFO_IS_YUV (&info))
+ values = yuv_values;
+ else if (GST_VIDEO_INFO_IS_GRAY (&info))
+ values = gray_values;
+ else
+ values = rgb_values;
+
+ GST_MEMDUMP ("buffer", GST_VIDEO_FRAME_PLANE_DATA (&frame, 0), 8);
- fail_unless_equals_int ((pixel & rmask) >> rshift, 0xff);
- fail_unless_equals_int ((pixel & gmask) >> gshift, 0x00);
- fail_unless_equals_int ((pixel & bmask) >> bshift, 0x00);
+ for (i = 0; i < GST_VIDEO_FRAME_N_COMPONENTS (&frame); i++) {
+ guint8 *data = GST_VIDEO_FRAME_COMP_DATA (&frame, i);
+
+ GST_DEBUG ("W: %d", GST_VIDEO_FORMAT_INFO_W_SUB (finfo, i));
+ GST_DEBUG ("H: %d", GST_VIDEO_FORMAT_INFO_H_SUB (finfo, i));
+
+ if (GST_VIDEO_FORMAT_INFO_W_SUB (finfo,
+ i) >= GST_VIDEO_FRAME_WIDTH (&frame))
+ continue;
+ if (GST_VIDEO_FORMAT_INFO_H_SUB (finfo,
+ i) >= GST_VIDEO_FRAME_HEIGHT (&frame))
+ continue;
+
+ if (GST_VIDEO_FORMAT_INFO_BITS (finfo) == 8) {
+ fail_unless_equals_int (data[0], values[i]);
+ } else if (GST_VIDEO_FORMAT_INFO_BITS (finfo) == 16) {
+ guint16 pixels, val;
+ gint depth;
+
+ if (GST_VIDEO_FORMAT_INFO_IS_LE (finfo))
+ pixels = GST_READ_UINT16_LE (data);
+ else
+ pixels = GST_READ_UINT16_BE (data);
+
+ depth = GST_VIDEO_FORMAT_INFO_DEPTH (finfo, i);
+ val = pixels >> GST_VIDEO_FORMAT_INFO_SHIFT (finfo, i);
+ val = val & ((1 << depth) - 1);
+
+ GST_DEBUG ("val %08x %d : %d", pixels, i, val);
+ fail_unless_equals_int (val, values[i] >> (8 - depth));
+ } else {
+ }
}
+
+ /*
+ fail_unless_equals_int ((pixel & rmask) >> rshift, 0xff);
+ fail_unless_equals_int ((pixel & gmask) >> gshift, 0x00);
+ fail_unless_equals_int ((pixel & bmask) >> bshift, 0x00);
+ */
}
GST_START_TEST (test_crop_to_1x1)
s = gst_caps_get_structure (caps, 0);
fail_unless (s != NULL);
- if (g_strcmp0 (gst_structure_get_name (s), "video/x-raw-gray") == 0) {
- /* videotestsrc does not support this format */
- gst_caps_unref (caps);
- continue;
- }
-
GST_INFO ("testing format: %" GST_PTR_FORMAT, caps);
gst_structure_set (s, "width", G_TYPE_INT, 160,
GST_END_TEST;
-static gboolean
-buffer_probe_cb (GstPad * pad, GstBuffer * buf, GstBuffer ** p_buf)
+static GstPadProbeReturn
+buffer_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer data)
{
+ GstBuffer **p_buf = data;
+ GstBuffer *buf = GST_PAD_PROBE_INFO_BUFFER (info);
+
gst_buffer_replace (p_buf, buf);
- return TRUE; /* keep data */
+
+ return GST_PAD_PROBE_OK; /* keep data */
}
GST_START_TEST (test_passthrough)
srcpad = gst_element_get_static_pad (ctx.src, "src");
fail_unless (srcpad != NULL);
- gst_pad_add_buffer_probe (srcpad, G_CALLBACK (buffer_probe_cb), &gen_buf);
+ gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BUFFER, buffer_probe_cb,
+ &gen_buf, NULL);
gst_object_unref (srcpad);
g_object_set (ctx.crop, "left", 0, "right", 0, "top", 0, "bottom", 0, NULL);
klass = GST_BASE_TRANSFORM_GET_CLASS (ctx.crop);
fail_unless (klass != NULL);
- caps = gst_caps_new_simple ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
+ caps = gst_caps_new_simple ("video/x-raw",
+ "format", G_TYPE_STRING, "I420",
"framerate", GST_TYPE_FRACTION, 1, 1,
"width", G_TYPE_INT, 200, "height", G_TYPE_INT, 100, NULL);
/* by default, it should be no cropping and hence passthrough */
- adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_is_equal (adj_caps, caps));
gst_caps_unref (adj_caps);
- adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_is_equal (adj_caps, caps));
gst_caps_unref (adj_caps);
g_object_set (ctx.crop, "left", 1, "right", 3, "top", 5, "bottom", 7, NULL);
g_object_set (ctx.crop, "left", 0, "right", 0, "top", 0, "bottom", 0, NULL);
- adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_is_equal (adj_caps, caps));
gst_caps_unref (adj_caps);
- adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_is_equal (adj_caps, caps));
gst_caps_unref (adj_caps);
/* ========= (1) fixed value ============================================= */
/* sink => source, source must be bigger if we crop stuff off */
- adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_get_size (adj_caps) == 1);
w_val =
gst_caps_unref (adj_caps);
/* source => sink becomes smaller */
- adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_get_size (adj_caps) == 1);
w_val =
"height", GST_TYPE_INT_RANGE, 3000, 4000, NULL);
/* sink => source, source must be bigger if we crop stuff off */
- adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_get_size (adj_caps) == 1);
w_val =
gst_caps_unref (adj_caps);
/* source => sink becomes smaller */
- adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_get_size (adj_caps) == 1);
w_val =
"height", GST_TYPE_INT_RANGE, 2, G_MAXINT, NULL);
/* sink => source, source must be bigger if we crop stuff off */
- adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_get_size (adj_caps) == 1);
w_val =
gst_caps_unref (adj_caps);
/* source => sink becomes smaller */
- adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_get_size (adj_caps) == 1);
w_val =
}
/* sink => source, source must be bigger if we crop stuff off */
- adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SRC, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_get_size (adj_caps) == 1);
w_val =
gst_caps_unref (adj_caps);
/* source => sink becomes smaller */
- adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps);
+ adj_caps = klass->transform_caps (crop, GST_PAD_SINK, caps, NULL);
fail_unless (adj_caps != NULL);
fail_unless (gst_caps_get_size (adj_caps) == 1);
w_val =
GstPad *mysrcpad, *mysinkpad;
#define VIDEO_CAPS_TEMPLATE_STRING \
- GST_VIDEO_CAPS_YUV ("I420") ";" \
- GST_VIDEO_CAPS_YUV ("AYUV") ";" \
- GST_VIDEO_CAPS_YUV ("YUY2") ";" \
- GST_VIDEO_CAPS_YUV ("UYVY") ";" \
- GST_VIDEO_CAPS_YUV ("YVYU") ";" \
- GST_VIDEO_CAPS_xRGB
+ GST_VIDEO_CAPS_MAKE ("{ I420, AYUV, YUY2, UYVY, YVYU, xRGB }")
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_DEBUG ("setup_element");
element = gst_check_setup_element (name);
g_object_set_valist (G_OBJECT (element), prop, var_args);
- mysrcpad = gst_check_setup_src_pad (element, &srctemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (element, &srctemplate);
gst_pad_set_active (mysrcpad, TRUE);
- mysinkpad = gst_check_setup_sink_pad (element, &sinktemplate, NULL);
+ mysinkpad = gst_check_setup_sink_pad (element, &sinktemplate);
gst_pad_set_active (mysinkpad, TRUE);
return element;
GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
"could not set to playing");
+ gst_pad_set_caps (mysrcpad, caps);
+
for (i = 0; i < num_buffers; ++i) {
inbuffer = gst_buffer_new_and_alloc (size);
/* makes valgrind's memcheck happier */
- memset (GST_BUFFER_DATA (inbuffer), 0, GST_BUFFER_SIZE (inbuffer));
- gst_buffer_set_caps (inbuffer, caps);
+ gst_buffer_memset (inbuffer, 0, 0, size);
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
switch (i) {
case 0:
- fail_unless (GST_BUFFER_SIZE (outbuffer) == size);
+ fail_unless (gst_buffer_get_size (outbuffer) == size);
/* no check on filter operation itself */
break;
default:
385, 289}, {
385, 385}};
gint i, n, r;
- GstVideoFormat format;
gint size;
- GstCaps *templ = gst_caps_from_string (VIDEO_CAPS_TEMPLATE_STRING);
+ GstCaps *allcaps, *templ = gst_caps_from_string (VIDEO_CAPS_TEMPLATE_STRING);
va_list varargs;
- n = gst_caps_get_size (templ);
+ allcaps = gst_caps_normalize (templ);
+ gst_caps_unref (templ);
+
+ n = gst_caps_get_size (allcaps);
for (i = 0; i < n; i++) {
- GstStructure *s = gst_caps_get_structure (templ, i);
+ GstStructure *s = gst_caps_get_structure (allcaps, i);
GstCaps *caps = gst_caps_new_empty ();
gst_caps_append_structure (caps, gst_structure_copy (s));
/* try various resolutions */
for (r = 0; r < G_N_ELEMENTS (resolutions); ++r) {
+ GstVideoInfo info;
+
caps = gst_caps_make_writable (caps);
gst_caps_set_simple (caps, "width", G_TYPE_INT, resolutions[r].width,
"height", G_TYPE_INT, resolutions[r].height,
"framerate", GST_TYPE_FRACTION, 25, 1, NULL);
GST_DEBUG ("Testing with caps: %" GST_PTR_FORMAT, caps);
- gst_video_format_parse_caps (caps, &format, NULL, NULL);
- size = gst_video_format_get_size (format, resolutions[r].width,
- resolutions[r].height);
+ gst_video_info_from_caps (&info, caps);
+ size = GST_VIDEO_INFO_SIZE (&info);
va_start (varargs, prop);
check_filter_caps (name, caps, size, num_buffers, prop, varargs);
gst_caps_unref (caps);
}
- gst_caps_unref (templ);
+ gst_caps_unref (allcaps);
}
GST_START_TEST (test_videobalance)
* get_peer, and then remove references in every test function */
static GstPad *mysrcpad, *mysinkpad;
-#define VIDEO_CAPS_STRING "video/x-raw-yuv, " \
- "format = (fourcc) I420, "\
+#define VIDEO_CAPS_STRING "video/x-raw, " \
+ "format = (string) I420, "\
"width = (int) 384, " \
"height = (int) 288, " \
"framerate = (fraction) 25/1, " \
GST_DEBUG ("setup_y4menc");
y4menc = gst_check_setup_element ("y4menc");
- mysrcpad = gst_check_setup_src_pad (y4menc, &srctemplate, NULL);
- mysinkpad = gst_check_setup_sink_pad (y4menc, &sinktemplate, NULL);
+ mysrcpad = gst_check_setup_src_pad (y4menc, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (y4menc, &sinktemplate);
gst_pad_set_active (mysrcpad, TRUE);
gst_pad_set_active (mysinkpad, TRUE);
size = 384 * 288 * 3 / 2;
inbuffer = gst_buffer_new_and_alloc (size);
/* makes valgrind's memcheck happier */
- memset (GST_BUFFER_DATA (inbuffer), 0, GST_BUFFER_SIZE (inbuffer));
+ gst_buffer_memset (inbuffer, 0, 0, size);
caps = gst_caps_from_string (VIDEO_CAPS_STRING);
- gst_buffer_set_caps (inbuffer, caps);
+ fail_unless (gst_pad_set_caps (mysrcpad, caps));
gst_caps_unref (caps);
GST_BUFFER_TIMESTAMP (inbuffer) = 0;
ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
/* clean up buffers */
for (i = 0; i < num_buffers; ++i) {
+ GstMapInfo map;
gchar *data;
+ gsize outsize;
outbuffer = GST_BUFFER (buffers->data);
fail_if (outbuffer == NULL);
switch (i) {
case 0:
- fail_unless (GST_BUFFER_SIZE (outbuffer) > size);
- fail_unless (memcmp (data0, GST_BUFFER_DATA (outbuffer),
- strlen (data0)) == 0 ||
- memcmp (data1, GST_BUFFER_DATA (outbuffer), strlen (data1)) == 0);
+ gst_buffer_map (outbuffer, &map, GST_MAP_READ);
+ outsize = map.size;
+ data = (gchar *) map.data;
+
+ fail_unless (outsize > size);
+ fail_unless (memcmp (data, data0, strlen (data0)) == 0 ||
+ memcmp (data, data1, strlen (data1)) == 0);
/* so we know there is a newline */
- data = (gchar *) GST_BUFFER_DATA (outbuffer);
data = strchr (data, '\n');
fail_unless (data != NULL);
data++;
fail_unless (memcmp (data2, data, strlen (data2)) == 0);
data += strlen (data2);
/* remainder must be frame data */
- fail_unless ((data - (gchar *) GST_BUFFER_DATA (outbuffer)) + size ==
- GST_BUFFER_SIZE (outbuffer));
+ fail_unless (data - (gchar *) map.data + size == outsize);
+ gst_buffer_unmap (outbuffer, &map);
break;
default:
break;
+++ /dev/null
-/* GStreamer
- * unit test for index setting on all elements
- * Copyright (C) 2005 Thomas Vander Stichele <thomas at apestaart dot org>
- * Copyright (C) 2011 Tim-Philipp Müller <tim centricular net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-# include "config.h"
-#endif
-
-#include <gst/check/gstcheck.h>
-
-static GList *elements = NULL;
-
-static void
-setup (void)
-{
- GList *features, *f;
- GList *plugins, *p;
- gchar **ignorelist = NULL;
- const gchar *INDEX_IGNORE_ELEMENTS = NULL;
-
- GST_DEBUG ("getting elements for package %s", PACKAGE);
- INDEX_IGNORE_ELEMENTS = g_getenv ("GST_INDEX_IGNORE_ELEMENTS");
- if (!g_getenv ("GST_NO_INDEX_IGNORE_ELEMENTS") && INDEX_IGNORE_ELEMENTS) {
- GST_DEBUG ("Will ignore element factories: '%s'", INDEX_IGNORE_ELEMENTS);
- ignorelist = g_strsplit (INDEX_IGNORE_ELEMENTS, " ", 0);
- }
-
- plugins = gst_registry_get_plugin_list (gst_registry_get_default ());
-
- for (p = plugins; p; p = p->next) {
- GstPlugin *plugin = p->data;
-
- if (strcmp (gst_plugin_get_source (plugin), PACKAGE) != 0)
- continue;
-
- features =
- gst_registry_get_feature_list_by_plugin (gst_registry_get_default (),
- gst_plugin_get_name (plugin));
-
- for (f = features; f; f = f->next) {
- GstPluginFeature *feature = f->data;
- const gchar *name = gst_plugin_feature_get_name (feature);
- gboolean ignore = FALSE;
-
- if (!GST_IS_ELEMENT_FACTORY (feature))
- continue;
-
- if (ignorelist) {
- gchar **s;
-
- for (s = ignorelist; s && *s; ++s) {
- if (g_str_has_prefix (name, *s)) {
- GST_DEBUG ("ignoring element %s", name);
- ignore = TRUE;
- }
- }
- if (ignore)
- continue;
- }
-
- GST_DEBUG ("adding element %s", name);
- elements = g_list_prepend (elements, (gpointer) g_strdup (name));
- }
- gst_plugin_feature_list_free (features);
- }
- gst_plugin_list_free (plugins);
- g_strfreev (ignorelist);
-}
-
-static void
-teardown (void)
-{
- GList *e;
-
- for (e = elements; e; e = e->next) {
- g_free (e->data);
- }
- g_list_free (elements);
- elements = NULL;
-}
-
-GST_START_TEST (test_set_index)
-{
- GstElement *element;
- GstIndex *idx;
- GList *e;
-
- idx = gst_index_factory_make ("memindex");
- if (idx == NULL)
- return;
-
- gst_object_ref_sink (idx);
-
- for (e = elements; e; e = e->next) {
- const gchar *name = e->data;
-
- GST_INFO ("testing element %s", name);
- element = gst_element_factory_make (name, name);
- fail_if (element == NULL, "Could not make element from factory %s", name);
-
- gst_element_set_index (element, idx);
- gst_object_unref (element);
- }
-
- gst_object_unref (idx);
-}
-
-GST_END_TEST;
-
-static Suite *
-index_suite (void)
-{
- Suite *s = suite_create ("index");
- TCase *tc_chain = tcase_create ("general");
-
- suite_add_tcase (s, tc_chain);
- tcase_add_checked_fixture (tc_chain, setup, teardown);
- tcase_add_test (tc_chain, test_set_index);
-
- return s;
-}
-
-GST_CHECK_MAIN (index);
GList *plugins, *p;
gchar **ignorelist = NULL;
const gchar *STATE_IGNORE_ELEMENTS = NULL;
+ GstRegistry *def;
GST_DEBUG ("getting elements for package %s", PACKAGE);
STATE_IGNORE_ELEMENTS = g_getenv ("GST_STATE_IGNORE_ELEMENTS");
ignorelist = g_strsplit (STATE_IGNORE_ELEMENTS, " ", 0);
}
- plugins = gst_registry_get_plugin_list (gst_registry_get_default ());
+ def = gst_registry_get ();
+
+ plugins = gst_registry_get_plugin_list (def);
for (p = plugins; p; p = p->next) {
GstPlugin *plugin = p->data;
continue;
features =
- gst_registry_get_feature_list_by_plugin (gst_registry_get_default (),
+ gst_registry_get_feature_list_by_plugin (def,
gst_plugin_get_name (plugin));
for (f = features; f; f = f->next) {
gchar *pipeline; \
\
pipeline = g_strdup_printf ("videotestsrc num-buffers=100 ! " \
- "ffmpegcolorspace ! " \
+ "videoconvert ! " \
" %s ! " \
" fakesink", #element); \
\
*/
#include <gst/check/gstcheck.h>
+#include <gst/audio/audio.h>
#include <glib/gstdio.h>
static guint16
-buffer_get_first_sample (GstBuffer * buf)
+_get_first_sample (GstSample * sample)
{
- GstStructure *s;
- gint w, d, c, r, e;
+ GstAudioInfo info;
+ GstCaps *caps;
+ GstBuffer *buf;
+ GstMapInfo map;
+ guint16 res;
- fail_unless (buf != NULL, "NULL buffer");
- fail_unless (GST_BUFFER_CAPS (buf) != NULL, "buffer without caps");
+ fail_unless (sample != NULL, "NULL sample");
- /* log buffer details */
+ caps = gst_sample_get_caps (sample);
+ fail_unless (caps != NULL, "sample without caps");
+
+ buf = gst_sample_get_buffer (sample);
GST_DEBUG ("buffer with size=%u, caps=%" GST_PTR_FORMAT,
- GST_BUFFER_SIZE (buf), GST_BUFFER_CAPS (buf));
- GST_MEMDUMP ("buffer data from decoder", GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ gst_buffer_get_size (buf), caps);
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ /* log buffer details */
+ GST_MEMDUMP ("buffer data from decoder", map.data, map.size);
/* make sure it's the format we expect */
- s = gst_caps_get_structure (GST_BUFFER_CAPS (buf), 0);
- fail_unless_equals_string (gst_structure_get_name (s), "audio/x-raw-int");
- fail_unless (gst_structure_get_int (s, "width", &w));
- fail_unless_equals_int (w, 16);
- fail_unless (gst_structure_get_int (s, "depth", &d));
- fail_unless_equals_int (d, 16);
- fail_unless (gst_structure_get_int (s, "rate", &r));
- fail_unless_equals_int (r, 44100);
- fail_unless (gst_structure_get_int (s, "channels", &c));
- fail_unless_equals_int (c, 1);
- fail_unless (gst_structure_get_int (s, "endianness", &e));
- if (e == G_BIG_ENDIAN)
- return GST_READ_UINT16_BE (GST_BUFFER_DATA (buf));
+ fail_unless (gst_audio_info_from_caps (&info, caps));
+
+ fail_unless_equals_int (GST_AUDIO_INFO_WIDTH (&info), 16);
+ fail_unless_equals_int (GST_AUDIO_INFO_DEPTH (&info), 16);
+ fail_unless_equals_int (GST_AUDIO_INFO_RATE (&info), 44100);
+ fail_unless_equals_int (GST_AUDIO_INFO_CHANNELS (&info), 1);
+
+ if (GST_AUDIO_INFO_IS_LITTLE_ENDIAN (&info))
+ res = GST_READ_UINT16_LE (map.data);
else
- return GST_READ_UINT16_LE (GST_BUFFER_DATA (buf));
+ res = GST_READ_UINT16_BE (map.data);
+
+ gst_buffer_unmap (buf, &map);
+
+ return res;
}
GST_START_TEST (test_decode)
{
GstElement *pipeline;
GstElement *appsink;
- GstBuffer *buffer = NULL;
+ GstSample *sample = NULL;
guint16 first_sample = 0;
guint size = 0;
gchar *path =
gst_element_set_state (pipeline, GST_STATE_PLAYING);
do {
- g_signal_emit_by_name (appsink, "pull-buffer", &buffer);
- if (buffer == NULL)
+ g_signal_emit_by_name (appsink, "pull-sample", &sample);
+ if (sample == NULL)
break;
if (first_sample == 0)
- first_sample = buffer_get_first_sample (buffer);
- GST_DEBUG ("buffer: %d\n", buffer->size);
- GST_DEBUG ("buffer: %04x\n", buffer_get_first_sample (buffer));
- size += buffer->size;
+ first_sample = _get_first_sample (sample);
+
+ size += gst_buffer_get_size (gst_sample_get_buffer (sample));
- gst_buffer_unref (buffer);
- buffer = NULL;
+ gst_sample_unref (sample);
+ sample = NULL;
}
while (TRUE);
GstElement *pipeline;
GstElement *appsink;
GstEvent *event;
- GstBuffer *buffer = NULL;
+ GstSample *sample = NULL;
guint16 first_sample = 0;
guint size = 0;
gchar *path =
gst_element_set_state (pipeline, GST_STATE_PLAYING);
do {
- g_signal_emit_by_name (appsink, "pull-buffer", &buffer);
- if (buffer == NULL)
+ g_signal_emit_by_name (appsink, "pull-sample", &sample);
+ if (sample == NULL)
break;
if (first_sample == 0)
- first_sample = buffer_get_first_sample (buffer);
- size += buffer->size;
+ first_sample = _get_first_sample (sample);
+ size += gst_buffer_get_size (gst_sample_get_buffer (sample));
- gst_buffer_unref (buffer);
- buffer = NULL;
+ gst_sample_unref (sample);
+ sample = NULL;
}
while (TRUE);
GstElement *pipeline;
GstElement *appsink;
GstEvent *event;
- GstBuffer *buffer = NULL;
+ GstSample *sample = NULL;
guint size = 0;
guint16 first_sample = 0;
gchar *path =
gst_element_set_state (pipeline, GST_STATE_PLAYING);
do {
- GST_DEBUG ("pulling buffer");
- g_signal_emit_by_name (appsink, "pull-buffer", &buffer);
- GST_DEBUG ("pulled buffer %p", buffer);
- if (buffer == NULL)
+ GST_DEBUG ("pulling sample");
+ g_signal_emit_by_name (appsink, "pull-sample", &sample);
+ GST_DEBUG ("pulled sample %p", sample);
+ if (sample == NULL)
break;
if (first_sample == 0) {
- fail_unless_equals_int (GST_BUFFER_OFFSET (buffer), 0L);
- first_sample = buffer_get_first_sample (buffer);
+// fail_unless_equals_int (GST_BUFFER_OFFSET (buffer), 0L);
+ first_sample = _get_first_sample (sample);
}
- size += buffer->size;
+ size += gst_buffer_get_size (gst_sample_get_buffer (sample));
- gst_buffer_unref (buffer);
- buffer = NULL;
+ gst_sample_unref (sample);
+ sample = NULL;
}
while (TRUE);
setter = GST_TAG_SETTER (mux);
fail_unless (setter != NULL);
- sent_tags = gst_structure_from_string (tag_str, NULL);
+ sent_tags = gst_tag_list_new_from_string (tag_str);
fail_unless (sent_tags != NULL);
gst_tag_setter_merge_tags (setter, sent_tags, GST_TAG_MERGE_REPLACE);
gst_tag_list_free (sent_tags);
GstBus *bus;
GMainLoop *loop;
GstTagList *sent_tags;
- gint i, j, n_recv, n_sent;
+ gint i, j, k, n_recv, n_sent;
const gchar *name_sent, *name_recv;
const GValue *value_sent, *value_recv;
gboolean found;
bus_watch = gst_bus_add_watch (bus, bus_handler, loop);
gst_object_unref (bus);
- sent_tags = gst_structure_from_string (tag_str, NULL);
+ sent_tags = gst_tag_list_new_from_string (tag_str);
fail_unless (sent_tags != NULL);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* verify tags */
fail_unless (received_tags != NULL);
- n_recv = gst_structure_n_fields (received_tags);
- n_sent = gst_structure_n_fields (sent_tags);
+ n_recv = gst_tag_list_n_tags (received_tags);
+ n_sent = gst_tag_list_n_tags (sent_tags);
fail_unless (n_recv >= n_sent);
/* FIXME: compare taglits values */
for (i = 0; i < n_sent; i++) {
- name_sent = gst_structure_nth_field_name (sent_tags, i);
- value_sent = gst_structure_get_value (sent_tags, name_sent);
+ name_sent = gst_tag_list_nth_tag_name (sent_tags, i);
+
found = FALSE;
for (j = 0; j < n_recv; j++) {
- name_recv = gst_structure_nth_field_name (received_tags, j);
+ name_recv = gst_tag_list_nth_tag_name (received_tags, j);
+
if (!strcmp (name_sent, name_recv)) {
- value_recv = gst_structure_get_value (received_tags, name_recv);
- comparison = gst_value_compare (value_sent, value_recv);
- if (comparison != GST_VALUE_EQUAL) {
- gchar *vs = g_strdup_value_contents (value_sent);
- gchar *vr = g_strdup_value_contents (value_recv);
- GST_DEBUG ("sent = %s:'%s', recv = %s:'%s'",
- G_VALUE_TYPE_NAME (value_sent), vs,
- G_VALUE_TYPE_NAME (value_recv), vr);
- g_free (vs);
- g_free (vr);
+ guint sent_len, recv_len;
+
+ sent_len = gst_tag_list_get_tag_size (sent_tags, name_sent);
+ recv_len = gst_tag_list_get_tag_size (received_tags, name_recv);
+
+ fail_unless (sent_len == recv_len,
+ "tag item %s has been received with different size", name_sent);
+
+ for (k = 0; k < sent_len; k++) {
+ value_sent = gst_tag_list_get_value_index (sent_tags, name_sent, k);
+ value_recv =
+ gst_tag_list_get_value_index (received_tags, name_recv, k);
+
+ comparison = gst_value_compare (value_sent, value_recv);
+ if (comparison != GST_VALUE_EQUAL) {
+ gchar *vs = g_strdup_value_contents (value_sent);
+ gchar *vr = g_strdup_value_contents (value_recv);
+ GST_DEBUG ("sent = %s:'%s', recv = %s:'%s'",
+ G_VALUE_TYPE_NAME (value_sent), vs,
+ G_VALUE_TYPE_NAME (value_recv), vr);
+ g_free (vs);
+ g_free (vr);
+ }
+ fail_unless (comparison == GST_VALUE_EQUAL,
+ "tag item %s has been received with different type or value",
+ name_sent);
+ found = TRUE;
+ break;
}
- fail_unless (comparison == GST_VALUE_EQUAL,
- "tag item %s has been received with different type or value",
- name_sent);
- found = TRUE;
- break;
}
}
fail_unless (found, "tag item %s is lost", name_sent);
GST_START_TEST (test_common_tags)
{
- if (!gst_default_registry_check_feature_version ("qtdemux", 0, 10, 23)) {
+ if (!gst_registry_check_feature_version (gst_registry_get (), "qtdemux", 0,
+ 10, 23)) {
GST_INFO ("Skipping test, qtdemux either not available or too old");
return;
}
GST_START_TEST (test_geo_location_tags)
{
- if (!gst_default_registry_check_feature_version ("qtdemux", 0, 10, 23)) {
+ if (!gst_registry_check_feature_version (gst_registry_get (), "qtdemux", 0,
+ 10, 23)) {
GST_INFO ("Skipping test, qtdemux either not available or too old");
return;
}
GST_START_TEST (test_user_tags)
{
- if (!gst_default_registry_check_feature_version ("qtdemux", 0, 10, 23)) {
+ if (!gst_registry_check_feature_version (gst_registry_get (), "qtdemux", 0,
+ 10, 23)) {
GST_INFO ("Skipping test, qtdemux either not available or too old");
return;
}
* Boston, MA 02111-1307, USA.
*/
+/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
#include <gst/check/gstcheck.h>
-#include <gst/audio/multichannel.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/audio-enumtypes.h>
static gboolean
bus_handler (GstBus * bus, GstMessage * message, gpointer data)
noinst_PROGRAMS = firfilter-example iirfilter-example
+# FIXME 0.11: ignore GValueArray warnings for now until this is sorted
+ERROR_CFLAGS=
+
firfilter_example_CFLAGS = $(GST_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
firfilter_example_LDADD = $(GST_LIBS) $(GST_PLUGINS_BASE_LIBS) -lgstfft-@GST_MAJORMINOR@ $(LIBM)
gst_init (&argc, &argv);
- caps = gst_caps_from_string ("audio/x-raw-int,channels=2");
+ caps = gst_caps_from_string ("audio/x-raw,channels=2");
pipeline = gst_pipeline_new (NULL);
g_assert (pipeline);
noinst_PROGRAMS = pulse
pulse_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
-pulse_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-0.10 $(GST_BASE_LIBS) $(GST_LIBS)
+pulse_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(GST_LIBS)
#include <gst/gst.h>
+#if 0
#include <gst/interfaces/propertyprobe.h>
static void
gst_element_set_state (element, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (element));
}
+#endif
int
main (int argc, char *argv[])
{
gst_init (&argc, &argv);
+#if 0
test_element ("pulsesink");
test_element ("pulsesrc");
+#endif
return 0;
}
noinst_PROGRAMS = server-alsasrc-PCMA client-PCMA
+# FIXME 0.11: ignore GValueArray warnings for now until this is sorted
+ERROR_CFLAGS=
+
server_alsasrc_PCMA_SOURCES = server-alsasrc-PCMA.c
server_alsasrc_PCMA_CFLAGS = $(GST_CFLAGS)
server_alsasrc_PCMA_LDADD = $(GST_LIBS) $(LIBM)
g_assert (res == TRUE);
/* the rtpbin element */
- rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
+ rtpbin = gst_element_factory_make ("rtpbin", "rtpbin");
g_assert (rtpbin);
gst_bin_add (GST_BIN (pipeline), rtpbin);
RTCP_RECV_PORT = 5003
RTCP_SEND_PORT = 5007
-#gst-launch -v gstrtpbin name=rtpbin \
+#gst-launch -v rtpbin name=rtpbin \
# udpsrc caps=$AUDIO_CAPS port=$RTP_RECV_PORT ! rtpbin.recv_rtp_sink_0 \
# rtpbin. ! rtppcmadepay ! alawdec ! audioconvert ! audioresample ! autoaudiosink \
# udpsrc port=$RTCP_RECV_PORT ! rtpbin.recv_rtcp_sink_0 \
res = gst.element_link_many(audiodepay, audiodec, audioconv, audiores, audiosink)
# the rtpbin element
-rtpbin = gst.element_factory_make('gstrtpbin', 'rtpbin')
+rtpbin = gst.element_factory_make('rtpbin', 'rtpbin')
pipeline.add(rtpbin)
/* build a pipeline equivalent to:
*
- * gst-launch -v gstrtpbin name=rtpbin \
+ * gst-launch -v rtpbin name=rtpbin \
* $AUDIO_SRC ! audioconvert ! audioresample ! $AUDIO_ENC ! $AUDIO_PAY ! rtpbin.send_rtp_sink_0 \
* rtpbin.send_rtp_src_0 ! udpsink port=5002 host=$DEST \
* rtpbin.send_rtcp_src_0 ! udpsink port=5003 host=$DEST sync=false async=false \
}
/* the rtpbin element */
- rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
+ rtpbin = gst_element_factory_make ("rtpbin", "rtpbin");
g_assert (rtpbin);
gst_bin_add (GST_BIN (pipeline), rtpbin);
pygst.require("0.10")
import gst
-#gst-launch -v gstrtpbin name=rtpbin audiotestsrc ! audioconvert ! alawenc ! rtppcmapay ! rtpbin.send_rtp_sink_0 \
+#gst-launch -v rtpbin name=rtpbin audiotestsrc ! audioconvert ! alawenc ! rtppcmapay ! rtpbin.send_rtp_sink_0 \
# rtpbin.send_rtp_src_0 ! udpsink port=10000 host=xxx.xxx.xxx.xxx \
# rtpbin.send_rtcp_src_0 ! udpsink port=10001 host=xxx.xxx.xxx.xxx sync=false async=false \
# udpsrc port=10002 ! rtpbin.recv_rtcp_sink_0
res = gst.element_link_many(audiosrc, audioconv, audiores, audioenc, audiopay)
# the rtpbin element
-rtpbin = gst.element_factory_make('gstrtpbin', 'rtpbin')
+rtpbin = gst.element_factory_make('rtpbin', 'rtpbin')
pipeline.add(rtpbin)
*/
#include <gst/gst.h>
-#include <gst/controller/gstcontroller.h>
#include <gst/controller/gstlfocontrolsource.h>
+#include <gst/controller/gstdirectcontrolbinding.h>
#include <stdlib.h>
{
GstElement *pipeline;
GstElement *shapewipe;
- GstController *ctrl;
- GstLFOControlSource *csource;
- GValue val = { 0, };
+ GstControlSource *cs;
GMainLoop *loop;
GstBus *bus;
gchar *pipeline_string;
}
gst_init (&argc, &argv);
- gst_controller_init (&argc, &argv);
if (argc > 2) {
border = atof (argv[2]);
shapewipe = gst_bin_get_by_name (GST_BIN (pipeline), "shape");
- if (!(ctrl = gst_controller_new (G_OBJECT (shapewipe), "position", NULL))) {
- g_print ("can't control shapewipe element\n");
- return -3;
- }
-
- csource = gst_lfo_control_source_new ();
-
- gst_controller_set_control_source (ctrl, "position",
- GST_CONTROL_SOURCE (csource));
+ cs = gst_lfo_control_source_new ();
- g_value_init (&val, G_TYPE_FLOAT);
- g_value_set_float (&val, 0.5);
- g_object_set (G_OBJECT (csource), "amplitude", &val, NULL);
- g_value_set_float (&val, 0.5);
- g_object_set (G_OBJECT (csource), "offset", &val, NULL);
- g_value_unset (&val);
+ gst_object_add_control_binding (GST_OBJECT_CAST (shapewipe),
+ gst_direct_control_binding_new (GST_OBJECT_CAST (shapewipe), "position",
+ cs));
- g_object_set (G_OBJECT (csource), "frequency", 0.25, NULL);
- g_object_set (G_OBJECT (csource), "timeshift", 500 * GST_MSECOND, NULL);
+ g_object_set (cs,
+ "amplitude", 0.5,
+ "offset", 0.5, "frequency", 0.25, "timeshift", 500 * GST_MSECOND, NULL);
- g_object_unref (csource);
+ g_object_unref (cs);
loop = g_main_loop_new (NULL, FALSE);
g_main_loop_unref (loop);
- g_object_unref (G_OBJECT (ctrl));
gst_object_unref (G_OBJECT (pipeline));
return 0;
gst_bin_add_many (GST_BIN (bin), src, audioconvert, spectrum, sink, NULL);
- caps = gst_caps_new_simple ("audio/x-raw-int",
+ caps = gst_caps_new_simple ("audio/x-raw",
"rate", G_TYPE_INT, AUDIOFREQ, NULL);
if (!gst_element_link (src, audioconvert) ||
-noinst_PROGRAMS = probe camctrl
+noinst_PROGRAMS = camctrl
camctrl_SOURCES = camctrl.c
camctrl_CFLAGS = $(GST_BASE_CFLAGS) $(GST_CONTROLLER_CFLAGS) $(GST_CFLAGS)
camctrl_LDADD = $(GST_BASE_LIBS) $(GST_CONTROLLER_LIBS) $(GST_LIBS)
-
-probe_SOURCES = probe.c
-probe_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
-probe_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-0.10 $(GST_BASE_LIBS) $(GST_LIBS)
-
*/
#include <gst/gst.h>
-#include <gst/controller/gstcontroller.h>
#include <gst/controller/gstinterpolationcontrolsource.h>
+#include <gst/controller/gstdirectcontrolbinding.h>
static void
event_loop (GstElement * bin)
}
static void
-set_program (GstController * ctrl, GstStructure * prog)
+set_program (GstObject * elem, GstStructure * prog)
{
const GstStructure *s;
- GstInterpolationControlSource *cs;
+ GstControlSource *cs;
GstClockTime ts, dur;
- GValue val = { 0, };
- gint v;
+ gdouble v;
const GValue *frame;
GHashTable *css;
gint i, j;
css = g_hash_table_new (g_str_hash, g_str_equal);
- g_value_init (&val, G_TYPE_INT);
-
ts = 0;
dur = gst_util_uint64_scale_int (GST_SECOND, 1, 15);
cs = g_hash_table_lookup (css, name);
if (!cs) {
cs = gst_interpolation_control_source_new ();
- gst_controller_set_control_source (ctrl, name, GST_CONTROL_SOURCE (cs));
- gst_interpolation_control_source_set_interpolation_mode (cs,
- GST_INTERPOLATE_NONE);
+ gst_object_add_control_binding (elem,
+ gst_direct_control_binding_new (elem, name, cs));
+ g_object_set (cs, "mode", GST_INTERPOLATION_MODE_NONE, NULL);
g_hash_table_insert (css, (gpointer) name, cs);
- g_object_unref (cs);
+ gst_object_unref (cs);
}
- gst_structure_get_int (s, name, &v);
- g_value_set_int (&val, v);
- gst_interpolation_control_source_set (cs, ts, &val);
- GST_DEBUG (" %s = %d", name, v);
+ gst_structure_get_double (s, name, &v);
+ gst_timed_value_control_source_set ((GstTimedValueControlSource *) cs, ts,
+ v);
+ GST_DEBUG (" %s = %lf", name, v);
}
ts += dur;
}
- g_value_unset (&val);
-
g_hash_table_unref (css);
}
GstElement *bin;
GstElement *src, *fmt, *enc, *sink;
GstCaps *caps;
- GstController *ctrl;
GstStructure *prog;
/* init gstreamer */
gst_init (&argc, &argv);
- gst_controller_init (&argc, &argv);
/* create a new bin to hold the elements */
bin = gst_pipeline_new ("camera");
return -1;
}
- /* get the controller */
- if (!(ctrl = gst_controller_new (G_OBJECT (src), "brightness", "contrast",
- "saturation", NULL))) {
- GST_WARNING ("can't control source element");
- return -1;
- }
-
/* programm a pattern of events */
#if 0
prog = gst_structure_from_string ("program"
- ", image00=(structure)\"image\\,contrast\\=0\\;\""
- ", image01=(structure)\"image\\,contrast\\=79\\;\""
- ", image02=(structure)\"image\\,contrast\\=255\\;\""
- ", image03=(structure)\"image\\,contrast\\=15\\;\";", NULL);
+ ", image00=(structure)\"image\\,contrast\\=0.0\\;\""
+ ", image01=(structure)\"image\\,contrast\\=0.3\\;\""
+ ", image02=(structure)\"image\\,contrast\\=1.0\\;\""
+ ", image03=(structure)\"image\\,contrast\\=0.05\\;\";", NULL);
#endif
#if 1
prog = gst_structure_from_string ("program"
- ", image00=(structure)\"image\\,brightness\\=255\\,contrast\\=0\\;\""
- ", image01=(structure)\"image\\,brightness\\=127\\,contrast\\=79\\;\""
- ", image02=(structure)\"image\\,brightness\\=64\\,contrast\\=255\\;\""
- ", image03=(structure)\"image\\,brightness\\=0\\,contrast\\=15\\;\";",
+ ", image00=(structure)\"image\\,brightness\\=1.0\\,contrast\\=0.0\\;\""
+ ", image01=(structure)\"image\\,brightness\\=0.5\\,contrast\\=0.3\\;\""
+ ", image02=(structure)\"image\\,brightness\\=0.25\\,contrast\\=1.0\\;\""
+ ", image03=(structure)\"image\\,brightness\\=0.0\\,contrast\\=0.05\\;\";",
NULL);
#endif
- set_program (ctrl, prog);
+ set_program (GST_OBJECT (src), prog);
g_object_set (src, "num-buffers", gst_structure_n_fields (prog), NULL);
/* prepare playback */
+++ /dev/null
-/* GStreamer
- * Copyright (C) 2009 Filippo Argiolas <filippo.argiolas@gmail.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#include <stdlib.h>
-#include <gst/gst.h>
-#include <gst/interfaces/propertyprobe.h>
-
-int
-main (int argc, char *argv[])
-{
- GstElement *src, *sink;
- GstElement *bin;
- GstPropertyProbe *probe = NULL;
- const GParamSpec *pspec = NULL;
- GValueArray *array = NULL;
- gint i, ret;
- GValue *value;
- const gchar *device;
- gchar *name;
- guint flags;
-
- gst_init (&argc, &argv);
-
- bin = gst_pipeline_new ("pipeline");
- g_assert (bin);
-
- src = gst_element_factory_make ("v4l2src", "v4l2_source");
- g_assert (src);
- sink = gst_element_factory_make ("fakesink", "fake_sink");
- g_assert (sink);
-
- /* add objects to the main pipeline */
- gst_bin_add_many (GST_BIN (bin), src, sink, NULL);
- /* link the elements */
- gst_element_link_many (src, sink, NULL);
-
- /* probe devices */
- g_print ("Probing devices with propertyprobe...\n");
- probe = GST_PROPERTY_PROBE (src);
- pspec = gst_property_probe_get_property (probe, "device");
- array = gst_property_probe_probe_and_get_values (probe, pspec);
-
- if (!array) {
- g_print ("No device found\n");
- exit (1);
- }
-
- for (i = 0; i < array->n_values; i++) {
- value = g_value_array_get_nth (array, i);
- device = g_value_get_string (value);
- g_print ("Device: %s\n", device);
- g_object_set_property (G_OBJECT (src), "device", value);
- gst_element_set_state (bin, GST_STATE_READY);
- ret = gst_element_get_state (bin, NULL, NULL, 10 * GST_SECOND);
- if (ret != GST_STATE_CHANGE_SUCCESS) {
- g_print ("Couldn't set STATE_READY\n");
- continue;
- }
- g_object_get (G_OBJECT (src), "device-name", &name, NULL);
- g_print ("Name: %s\n", name);
- g_free (name);
- g_object_get (G_OBJECT (src), "flags", &flags, NULL);
- g_print ("Flags: 0x%08X\n", flags);
- gst_element_set_state (bin, GST_STATE_NULL);
- g_print ("\n");
- }
-
- exit (0);
-}
v4l2src_test_SOURCES = v4l2src-test.c
v4l2src_test_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
-v4l2src_test_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_LIBS)
+v4l2src_test_LDADD = $(GST_PLUGINS_BASE_LIBS) \
+ -lgstvideo-$(GST_MAJORMINOR) \
+ -lgstinterfaces-$(GST_MAJORMINOR) $(GST_LIBS)
else
V4L2_TESTS =
test_oss4_SOURCES = test-oss4.c
test_oss4_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
-test_oss4_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-0.10 $(GST_LIBS)
+test_oss4_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_LIBS)
test_oss4_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
else
OSS4_TESTS=
GstPad *eq_sinkpad;
gchar *uri;
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- if (!g_thread_supported ())
- g_thread_init (NULL);
-#endif
-
/* command line option parsing */
ctx = g_option_context_new ("FILENAME");
g_option_context_add_group (ctx, gst_init_get_option_group ());
/* FIXME: is this racy or does decodebin2 make sure caps are always
* negotiated at this point? */
- caps = gst_pad_get_caps (new_pad);
+ caps = gst_pad_query_caps (new_pad, NULL);
g_return_if_fail (caps != NULL);
s = gst_caps_get_structure (caps, 0);
{
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ASYNC_DONE:{
- GstFormat fmt = GST_FORMAT_TIME;
-
/* only interested in async-done messages from the top-level pipeline */
if (msg->src != GST_OBJECT_CAST (info->pipe))
break;
}
/* update position */
- if (!gst_element_query_position (info->pipe, &fmt, &info->cur_pos))
+ if (!gst_element_query_position (info->pipe, GST_FORMAT_TIME,
+ &info->cur_pos))
info->cur_pos = -1;
break;
}
case GST_MESSAGE_ELEMENT:{
const GValue *val;
GdkPixbuf *pixbuf = NULL;
+ const GstStructure *structure;
/* only interested in element messages from our gdkpixbufsink */
if (msg->src != GST_OBJECT_CAST (info->sink))
break;
/* only interested in these two messages */
- if (!gst_structure_has_name (msg->structure, "preroll-pixbuf") &&
- !gst_structure_has_name (msg->structure, "pixbuf")) {
+ if (!gst_message_has_name (msg, "preroll-pixbuf") &&
+ !gst_message_has_name (msg, "pixbuf")) {
break;
}
g_print ("pixbuf\n");
- val = gst_structure_get_value (msg->structure, "pixbuf");
+ structure = gst_message_get_structure (msg);
+ val = gst_structure_get_value (structure, "pixbuf");
g_return_if_fail (val != NULL);
pixbuf = GDK_PIXBUF (g_value_dup_object (val));
seek_to (AppInfo * info, gdouble percent)
{
GstSeekFlags seek_flags;
- GstFormat fmt = GST_FORMAT_TIME;
gint64 seek_pos, dur = -1;
- if (!gst_element_query_duration (info->pipe, &fmt, &dur) || dur <= 0) {
+ if (!gst_element_query_duration (info->pipe, GST_FORMAT_TIME, &dur)
+ || dur <= 0) {
g_printerr ("Could not query duration\n");
return;
}
GOptionContext *ctx;
GError *opt_err = NULL;
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- if (!g_thread_supported ())
- g_thread_init (NULL);
-#endif
-
gtk_init (&argc, &argv);
/* command line option parsing */
GOptionContext *ctx;
GError *err = NULL;
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- if (!g_thread_supported ())
- g_thread_init (NULL);
-#endif
-
ctx = g_option_context_new ("");
g_option_context_add_main_entries (ctx, options, NULL);
g_option_context_add_group (ctx, gst_init_get_option_group ());
#include <gst/gst.h>
#include <gst/interfaces/tuner.h>
-#include <gst/interfaces/colorbalance.h>
-#include <gst/interfaces/videoorientation.h>
+#include <gst/video/colorbalance.h>
+#include <gst/video/videoorientation.h>
GstElement *pipeline, *source, *sink;
GMainLoop *loop;
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
loop = g_main_loop_new (NULL, FALSE);
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- input_thread = g_thread_create (read_user, source, TRUE, NULL);
-#else
input_thread = g_thread_try_new ("v4l2src-test", read_user, source, NULL);
-#endif
if (input_thread == NULL) {
- fprintf (stderr, "error: g_thread_create return NULL");
+ fprintf (stderr, "error: g_thread_try_new() failed");
return -1;
}
/* need to block the streaming thread while changing these properties,
* otherwise we might get random not-negotiated errors (when caps are
* changed in between upstream calling pad_alloc_buffer() and pushing
- * the processed buffer?) */
- gst_pad_set_blocked (pad, TRUE);
+ * the processed buffer?) FIXME should not be needed */
+ /* gst_pad_set_blocked (pad, TRUE); */
g_object_set (videocrop, "left", hcrop, "top", vcrop, NULL);
- gst_pad_set_blocked (pad, FALSE);
+ /* gst_pad_set_blocked (pad, FALSE); */
waited_for_block = g_timer_elapsed (timer, NULL) * (double) GST_SECOND;
/* GST_LOG ("waited: %" GST_TIME_FORMAT ", frame len: %" GST_TIME_FORMAT,
GstCaps *filter_caps = NULL;
GList *caps_list, *l;
-#if !GLIB_CHECK_VERSION (2, 31, 0)
- if (!g_thread_supported ())
- g_thread_init (NULL);
-#endif
-
/* command line option parsing */
ctx = g_option_context_new ("");
g_option_context_add_group (ctx, gst_init_get_option_group ());