DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
-if USE_GCONFTOOL
-GCONF_DIR = gconf
-else
-GCONF_DIR =
-endif
-
ALWAYS_SUBDIRS = \
gst sys ext \
tests \
m4 \
pkgconfig
-SUBDIRS = \
- $(ALWAYS_SUBDIRS) \
- $(GCONF_DIR)
-
-DIST_SUBDIRS = \
- $(ALWAYS_SUBDIRS) \
- gconf
+SUBDIRS = $(ALWAYS_SUBDIRS)
+DIST_SUBDIRS = $(ALWAYS_SUBDIRS)
# include before EXTRA_DIST for win32 assignment
include $(top_srcdir)/common/win32.mak
Plugins: cdio (cdiocddasrc)
URL: http://www.gnu.org/software/libcdio/
-Package: ESound
-Version: >= 0.2.12
-Plugins: esdsink
-URL: http://www.gnome.org/
-
Package: FLAC
Version: == 1.1.2
Plugins: flac (flacenc, flacdec)
URL: http://flac.sourceforge.net/
-Package: GConf
-Version: >= 2.0
-Plugins: gconfelements (gconfvideosink, gconfvideosrc, gconfaudiosink,
- gconfaudiosrc)
-URL: http://www.gnome.org/
-
Package: HAL
Version: >= 0.5.6
Plugins: halelements (halaudiosink, halaudiosrc)
dnl initialize autoconf
dnl releases only do -Wall, git and prerelease does -Werror too
dnl use a three digit version number for releases, and four for git/pre
-AC_INIT(GStreamer Good Plug-ins, 0.10.30.1,
+AC_INIT(GStreamer Good Plug-ins, 0.11.0.1,
http://bugzilla.gnome.org/enter_bug.cgi?product=GStreamer,
gst-plugins-good)
dnl our libraries and install dirs use major.minor as a version
GST_MAJORMINOR=$PACKAGE_VERSION_MAJOR.$PACKAGE_VERSION_MINOR
dnl we override it here if we need to for the release candidate of new series
-GST_MAJORMINOR=0.10
+GST_MAJORMINOR=0.11
AC_SUBST(GST_MAJORMINOR)
AG_GST_LIBTOOL_PREPARE
AM_PROG_LIBTOOL
dnl *** required versions of GStreamer stuff ***
-GST_REQ=0.10.35.1
-GSTPB_REQ=0.10.35.1
+GST_REQ=0.11.0
+GSTPB_REQ=0.11.0
dnl *** autotools stuff ****
AC_PATH_PROG(VALGRIND_PATH, valgrind, no)
AM_CONDITIONAL(HAVE_VALGRIND, test ! "x$VALGRIND_PATH" = "xno")
-dnl check for gconftool-2
-dnl this macro defines an am conditional, so it needs to be run always
-AM_GCONF_SOURCE_2
-
dnl check for documentation tools
GTK_DOC_CHECK([1.3])
AS_PATH_PYTHON([2.1])
AM_CONDITIONAL(HAVE_GTK, test "x$HAVE_GTK" = "xyes")
AM_CONDITIONAL(HAVE_GTK_X11, test "x$HAVE_GTK_X11" = "xyes")
-dnl should we install schemas ?
-translit(dnm, m, l) AM_CONDITIONAL(USE_GCONFTOOL, true)
-AG_GST_CHECK_FEATURE(GCONFTOOL, [GConf schemas], , [
- AC_PATH_PROG(GCONFTOOL, gconftool-2, no)
- if test x$GCONFTOOL = xno; then
- AC_MSG_WARN(Not installing GConf schemas)
- HAVE_GCONFTOOL="no"
- else
- HAVE_GCONFTOOL="yes"
- fi
- AC_SUBST(HAVE_GCONFTOOL)
-])
-
dnl *** set variables based on configure arguments ***
dnl set license and copyright notice
AG_GST_PKG_CHECK_MODULES(CAIRO_GOBJECT, cairo-gobject >= 1.10.0)
])
-dnl **** ESound ****
-translit(dnm, m, l) AM_CONDITIONAL(USE_ESD, true)
-AG_GST_CHECK_FEATURE(ESD, [ESounD sound daemon], esdsink, [
- AG_GST_PKG_CHECK_MODULES(ESD, esound >= 0.2.12)
- if test $HAVE_ESD = no
- then
- AM_PATH_ESD(0.2.12, HAVE_ESD="yes")
- AS_SCRUB_INCLUDE(ESD_CFLAGS)
- fi
-])
-
dnl *** FLAC ***
translit(dnm, m, l) AM_CONDITIONAL(USE_FLAC, true)
AG_GST_CHECK_FEATURE(FLAC, [FLAC lossless audio], flac, [
AG_GST_PKG_CHECK_MODULES(FLAC, flac >= 1.1.4)
])
-dnl *** GConf ***
-translit(dnm, m, l) AM_CONDITIONAL(USE_GCONF, true)
-AG_GST_CHECK_FEATURE(GCONF, [GConf libraries], gconfelements, [
- AG_GST_PKG_CHECK_MODULES(GCONF, gconf-2.0)
-])
-
dnl *** GDK pixbuf ***
translit(dnm, m, l) AM_CONDITIONAL(USE_GDK_PIXBUF, true)
AG_GST_CHECK_FEATURE(GDK_PIXBUF, [GDK pixbuf], gdkpixbuf, [
AM_CONDITIONAL(USE_CAIRO_GOBJECT, false)
AM_CONDITIONAL(USE_DIRECTSOUND, false)
AM_CONDITIONAL(USE_DV1394, false)
-AM_CONDITIONAL(USE_ESD, false)
AM_CONDITIONAL(USE_FLAC, false)
-AM_CONDITIONAL(USE_GCONF, false)
-AM_CONDITIONAL(USE_GCONFTOOL, false)
AM_CONDITIONAL(USE_GDK_PIXBUF, false)
AM_CONDITIONAL(USE_GST_V4L2, false)
AM_CONDITIONAL(USE_HAL, false)
dnl prefer internal headers to already installed ones
dnl also add builddir include for enumtypes and marshal
dnl add ERROR_CFLAGS, but overridable
+GST_CFLAGS="$GST_CFLAGS -DGST_USE_UNSTABLE_API"
GST_CXXFLAGS="-I\$(top_srcdir)/gst-libs $GST_CFLAGS $GLIB_EXTRA_CFLAGS \$(GST_OPTION_CXXFLAGS)"
GST_CFLAGS="-I\$(top_srcdir)/gst-libs $GST_CFLAGS $GLIB_EXTRA_CFLAGS \$(GST_OPTION_CFLAGS)"
AC_SUBST(GST_CFLAGS)
ext/annodex/Makefile
ext/cairo/Makefile
ext/dv/Makefile
-ext/esd/Makefile
ext/flac/Makefile
-ext/gconf/Makefile
ext/gdk_pixbuf/Makefile
ext/hal/Makefile
ext/jack/Makefile
tests/examples/v4l2/Makefile
tests/files/Makefile
tests/icles/Makefile
-gconf/Makefile
-gconf/gstreamer.schemas
common/Makefile
common/m4/Makefile
m4/Makefile
-e "s/.* PACKAGE_STRING$/#define PACKAGE_STRING \"$PACKAGE_STRING\"/" \
-e 's/.* PACKAGE_TARNAME$/#define PACKAGE_TARNAME "'$PACKAGE_TARNAME'"/' \
-e 's/.* PACKAGE_VERSION$/#define PACKAGE_VERSION "'$PACKAGE_VERSION'"/' \
- -e 's/.* PLUGINDIR$/#ifdef _DEBUG\n# define PLUGINDIR PREFIX "\\\\debug\\\\lib\\\\gstreamer-0.10"\n#else\n# define PLUGINDIR PREFIX "\\\\lib\\\\gstreamer-0.10"\n#endif/' \
+ -e 's/.* PLUGINDIR$/#ifdef _DEBUG\n# define PLUGINDIR PREFIX "\\\\debug\\\\lib\\\\gstreamer-0.11"\n#else\n# define PLUGINDIR PREFIX "\\\\lib\\\\gstreamer-0.11"\n#endif/' \
-e 's/.* USE_BINARY_REGISTRY$/#define USE_BINARY_REGISTRY/' \
-e 's/.* VERSION$/#define VERSION "'$VERSION'"/' \
-e "s/.* DEFAULT_AUDIOSINK$/#define DEFAULT_AUDIOSINK \"directsoundsink\"/" \
$(top_srcdir)/ext/cairo/gstcairooverlay.h \
$(top_srcdir)/ext/dv/gstdvdec.h \
$(top_srcdir)/ext/dv/gstdvdemux.h \
- $(top_srcdir)/ext/esd/esdsink.h \
$(top_srcdir)/ext/flac/gstflacdec.h \
$(top_srcdir)/ext/flac/gstflacenc.h \
$(top_srcdir)/ext/flac/gstflactag.h \
- $(top_srcdir)/ext/gconf/gstgconfaudiosrc.h \
- $(top_srcdir)/ext/gconf/gstgconfaudiosink.h \
- $(top_srcdir)/ext/gconf/gstgconfvideosrc.h \
- $(top_srcdir)/ext/gconf/gstgconfvideosink.h \
$(top_srcdir)/ext/gdk_pixbuf/gstgdkpixbufsink.h \
$(top_srcdir)/ext/hal/gsthalaudiosink.h \
$(top_srcdir)/ext/hal/gsthalaudiosrc.h \
<xi:include href="xml/element-equalizer-10bands.xml" />
<xi:include href="xml/element-equalizer-3bands.xml" />
<xi:include href="xml/element-equalizer-nbands.xml" />
- <xi:include href="xml/element-esdsink.xml" />
<xi:include href="xml/element-flacdec.xml" />
<xi:include href="xml/element-flacenc.xml" />
<xi:include href="xml/element-flacparse.xml" />
<xi:include href="xml/element-flvmux.xml" />
<xi:include href="xml/element-flxdec.xml" />
<xi:include href="xml/element-gamma.xml" />
- <xi:include href="xml/element-gconfaudiosrc.xml" />
- <xi:include href="xml/element-gconfaudiosink.xml" />
- <xi:include href="xml/element-gconfvideosrc.xml" />
- <xi:include href="xml/element-gconfvideosink.xml" />
<xi:include href="xml/element-gdkpixbufsink.xml" />
<xi:include href="xml/element-goom.xml" />
<xi:include href="xml/element-goom2k1.xml" />
<xi:include href="xml/plugin-efence.xml" />
<xi:include href="xml/plugin-equalizer.xml" />
<xi:include href="xml/plugin-effectv.xml" />
- <xi:include href="xml/plugin-esdsink.xml" />
<xi:include href="xml/plugin-flac.xml" />
<xi:include href="xml/plugin-flv.xml" />
<xi:include href="xml/plugin-flxdec.xml" />
- <xi:include href="xml/plugin-gconfelements.xml" />
<xi:include href="xml/plugin-gdkpixbuf.xml" />
<xi:include href="xml/plugin-goom.xml" />
<xi:include href="xml/plugin-goom2k1.xml" />
</SECTION>
<SECTION>
-<FILE>element-esdsink</FILE>
-<TITLE>esdsink</TITLE>
-GstEsdSink
-<SUBSECTION Standard>
-GstEsdSinkClass
-GST_TYPE_ESDSINK
-GST_ESDSINK
-GST_ESDSINK_CLASS
-GST_IS_ESDSINK
-GST_IS_ESDSINK_CLASS
-gst_esdsink_get_type
-</SECTION>
-
-<SECTION>
<FILE>element-flacdec</FILE>
<TITLE>flacdec</TITLE>
GstFlacDec
</SECTION>
<SECTION>
-<FILE>element-gconfaudiosrc</FILE>
-<TITLE>gconfaudiosrc</TITLE>
-GstGConfAudioSrc
-<SUBSECTION Standard>
-GstGConfAudioSrcClass
-GST_GCONF_AUDIO_SRC
-GST_IS_GCONF_AUDIO_SRC
-GST_TYPE_GCONF_AUDIO_SRC
-GST_GCONF_AUDIO_SRC_CLASS
-GST_IS_GCONF_AUDIO_SRC_CLASS
-gst_gconf_audio_src_get_type
-</SECTION>
-
-<SECTION>
-<FILE>element-gconfaudiosink</FILE>
-<TITLE>gconfaudiosink</TITLE>
-GstGConfAudioSink
-<SUBSECTION Standard>
-GstGConfAudioSinkClass
-GST_GCONF_AUDIO_SINK
-GST_IS_GCONF_AUDIO_SINK
-GST_TYPE_GCONF_AUDIO_SINK
-GST_GCONF_AUDIO_SINK_CLASS
-GST_IS_GCONF_AUDIO_SINK_CLASS
-gst_gconf_audio_sink_get_type
-</SECTION>
-
-<SECTION>
-<FILE>element-gconfvideosrc</FILE>
-<TITLE>gconfvideosrc</TITLE>
-GstGConfVideoSrc
-<SUBSECTION Standard>
-GstGConfVideoSrcClass
-GST_GCONF_VIDEO_SRC
-GST_IS_GCONF_VIDEO_SRC
-GST_TYPE_GCONF_VIDEO_SRC
-GST_GCONF_VIDEO_SRC_CLASS
-GST_IS_GCONF_VIDEO_SRC_CLASS
-gst_gconf_video_src_get_type
-</SECTION>
-
-<SECTION>
-<FILE>element-gconfvideosink</FILE>
-<TITLE>gconfvideosink</TITLE>
-GstGConfVideoSink
-<SUBSECTION Standard>
-GstGConfVideoSinkClass
-GST_GCONF_VIDEO_SINK
-GST_IS_GCONF_VIDEO_SINK
-GST_TYPE_GCONF_VIDEO_SINK
-GST_GCONF_VIDEO_SINK_CLASS
-GST_IS_GCONF_VIDEO_SINK_CLASS
-gst_gconf_video_sink_get_type
-</SECTION>
-
-<SECTION>
<FILE>element-gdkpixbufsink</FILE>
<TITLE>gdkpixbufsink</TITLE>
GstGdkPixbufSink
+++ /dev/null
-<plugin>
- <name>esdsink</name>
- <description>ESD Element Plugins</description>
- <filename>../../ext/esd/.libs/libgstesd.so</filename>
- <basename>libgstesd.so</basename>
- <version>0.10.30.1</version>
- <license>LGPL</license>
- <source>gst-plugins-good</source>
- <package>GStreamer Good Plug-ins git</package>
- <origin>Unknown package origin</origin>
- <elements>
- <element>
- <name>esdsink</name>
- <longname>Esound audio sink</longname>
- <class>Sink/Audio</class>
- <description>Plays audio to an esound server</description>
- <author>Arwed von Merkatz <v.merkatz@gmx.net></author>
- <pads>
- <caps>
- <name>sink</name>
- <direction>sink</direction>
- <presence>always</presence>
- <details>audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 2 ]; audio/x-raw-int, signed=(boolean){ true, false }, width=(int)8, depth=(int)8, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 2 ]</details>
- </caps>
- </pads>
- </element>
- </elements>
-</plugin>
+++ /dev/null
-<plugin>
- <name>gconfelements</name>
- <description>elements wrapping the GStreamer/GConf audio/video output settings</description>
- <filename>../../ext/gconf/.libs/libgstgconfelements.so</filename>
- <basename>libgstgconfelements.so</basename>
- <version>0.10.30.1</version>
- <license>LGPL</license>
- <source>gst-plugins-good</source>
- <package>GStreamer Good Plug-ins git</package>
- <origin>Unknown package origin</origin>
- <elements>
- <element>
- <name>gconfaudiosink</name>
- <longname>GConf audio sink</longname>
- <class>Sink/Audio</class>
- <description>Audio sink embedding the GConf-settings for audio output</description>
- <author>Jan Schmidt <thaytan@mad.scientist.com></author>
- <pads>
- </pads>
- </element>
- <element>
- <name>gconfaudiosrc</name>
- <longname>GConf audio source</longname>
- <class>Source/Audio</class>
- <description>Audio source embedding the GConf-settings for audio input</description>
- <author>GStreamer maintainers <gstreamer-devel@lists.sourceforge.net></author>
- <pads>
- </pads>
- </element>
- <element>
- <name>gconfvideosink</name>
- <longname>GConf video sink</longname>
- <class>Sink/Video</class>
- <description>Video sink embedding the GConf-settings for video output</description>
- <author>GStreamer maintainers <gstreamer-devel@lists.sourceforge.net></author>
- <pads>
- </pads>
- </element>
- <element>
- <name>gconfvideosrc</name>
- <longname>GConf video source</longname>
- <class>Source/Video</class>
- <description>Video source embedding the GConf-settings for video input</description>
- <author>GStreamer maintainers <gstreamer-devel@lists.sourceforge.net></author>
- <pads>
- </pads>
- </element>
- </elements>
-</plugin>
CAIRO_DIR =
endif
-if USE_ESD
-ESD_DIR = esd
-else
-ESD_DIR =
-endif
-
if USE_FLAC
FLAC_DIR = flac
else
FLAC_DIR =
endif
-if USE_GCONF
-GCONF_DIR = gconf
-else
-GCONF_DIR =
-endif
-
if USE_GDK_PIXBUF
GDK_PIXBUF_DIR = gdk_pixbuf
else
$(ANNODEX_DIR) \
$(CAIRO_DIR) \
$(DV1394_DIR) \
- $(ESD_DIR) \
$(FLAC_DIR) \
- $(GCONF_DIR) \
$(GDK_PIXBUF_DIR) \
$(HAL_DIR) \
$(JACK_DIR) \
annodex \
cairo \
dv \
- esd \
flac \
- gconf \
gdk_pixbuf \
hal \
jack \
enum
{
- ARG_0,
- ARG_WIDTH,
- ARG_HEIGHT,
- ARG_DRIVER,
- ARG_DITHER,
- ARG_BRIGHTNESS,
- ARG_CONTRAST,
- ARG_GAMMA,
- ARG_INVERSION,
- ARG_RANDOMVAL,
- ARG_FRAMES_DISPLAYED,
- ARG_FRAME_TIME
+ PROP_0,
+ PROP_WIDTH,
+ PROP_HEIGHT,
+ PROP_DRIVER,
+ PROP_DITHER,
+ PROP_BRIGHTNESS,
+ PROP_CONTRAST,
+ PROP_GAMMA,
+ PROP_INVERSION,
+ PROP_RANDOMVAL,
+ PROP_FRAMES_DISPLAYED,
+ PROP_FRAME_TIME
};
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
);
-static void gst_aasink_base_init (gpointer g_class);
-static void gst_aasink_class_init (GstAASinkClass * klass);
-static void gst_aasink_init (GstAASink * aasink);
-
static gboolean gst_aasink_setcaps (GstBaseSink * pad, GstCaps * caps);
static void gst_aasink_get_times (GstBaseSink * sink, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end);
static GstStateChangeReturn gst_aasink_change_state (GstElement * element,
GstStateChange transition);
-static GstElementClass *parent_class = NULL;
static guint gst_aasink_signals[LAST_SIGNAL] = { 0 };
-GType
-gst_aasink_get_type (void)
-{
- static GType aasink_type = 0;
-
- if (!aasink_type) {
- static const GTypeInfo aasink_info = {
- sizeof (GstAASinkClass),
- gst_aasink_base_init,
- NULL,
- (GClassInitFunc) gst_aasink_class_init,
- NULL,
- NULL,
- sizeof (GstAASink),
- 0,
- (GInstanceInitFunc) gst_aasink_init,
- };
-
- aasink_type =
- g_type_register_static (GST_TYPE_BASE_SINK, "GstAASink", &aasink_info,
- 0);
- }
- return aasink_type;
-}
+#define gst_aasink_parent_class parent_class
+G_DEFINE_TYPE (GstAASink, gst_aasink, GST_TYPE_BASE_SINK);
#define GST_TYPE_AADRIVERS (gst_aasink_drivers_get_type())
static GType
}
static void
-gst_aasink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
- gst_element_class_set_details_simple (element_class, "ASCII art video sink",
- "Sink/Video",
- "An ASCII art videosink", "Wim Taymans <wim.taymans@chello.be>");
-}
-
-static void
gst_aasink_class_init (GstAASinkClass * klass)
{
GObjectClass *gobject_class;
gstelement_class = (GstElementClass *) klass;
gstbasesink_class = (GstBaseSinkClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
-
gobject_class->set_property = gst_aasink_set_property;
gobject_class->get_property = gst_aasink_get_property;
/* FIXME: add long property descriptions */
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_WIDTH,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_WIDTH,
g_param_spec_int ("width", "width", "width", G_MININT, G_MAXINT, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_HEIGHT,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_HEIGHT,
g_param_spec_int ("height", "height", "height", G_MININT, G_MAXINT, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DRIVER,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DRIVER,
g_param_spec_enum ("driver", "driver", "driver", GST_TYPE_AADRIVERS, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DITHER,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DITHER,
g_param_spec_enum ("dither", "dither", "dither", GST_TYPE_AADITHER, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BRIGHTNESS,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BRIGHTNESS,
g_param_spec_int ("brightness", "brightness", "brightness", G_MININT,
G_MAXINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_CONTRAST,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_CONTRAST,
g_param_spec_int ("contrast", "contrast", "contrast", G_MININT, G_MAXINT,
0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GAMMA,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_GAMMA,
g_param_spec_float ("gamma", "gamma", "gamma", 0.0, 5.0, 1.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_INVERSION,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_INVERSION,
g_param_spec_boolean ("inversion", "inversion", "inversion", TRUE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_RANDOMVAL,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_RANDOMVAL,
g_param_spec_int ("randomval", "randomval", "randomval", G_MININT,
G_MAXINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_FRAMES_DISPLAYED,
- g_param_spec_int ("frames-displayed", "frames displayed",
- "frames displayed", G_MININT, G_MAXINT, 0,
+ g_object_class_install_property (G_OBJECT_CLASS (klass),
+ PROP_FRAMES_DISPLAYED, g_param_spec_int ("frames-displayed",
+ "frames displayed", "frames displayed", G_MININT, G_MAXINT, 0,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_FRAME_TIME,
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_FRAME_TIME,
g_param_spec_int ("frame-time", "frame time", "frame time", G_MININT,
G_MAXINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
G_STRUCT_OFFSET (GstAASinkClass, have_size), NULL, NULL,
gst_marshal_VOID__INT_INT, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "ASCII art video sink", "Sink/Video", "An ASCII art videosink",
+ "Wim Taymans <wim.taymans@chello.be>");
+
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_aasink_change_state);
gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_aasink_setcaps);
GstClockTime * start, GstClockTime * end)
{
*start = GST_BUFFER_TIMESTAMP (buffer);
- *end = *start + GST_BUFFER_DURATION (buffer);
+ if (GST_BUFFER_DURATION_IS_VALID (buffer))
+ *end = *start + GST_BUFFER_DURATION (buffer);
}
static GstFlowReturn
gst_aasink_render (GstBaseSink * basesink, GstBuffer * buffer)
{
GstAASink *aasink;
+ guint8 *data;
aasink = GST_AASINK (basesink);
GST_DEBUG ("render");
- gst_aasink_scale (aasink, GST_BUFFER_DATA (buffer), /* src */
+ data = gst_buffer_map (buffer, NULL, NULL, GST_MAP_READ);
+ gst_aasink_scale (aasink, data, /* src */
aa_image (aasink->context), /* dest */
aasink->width, /* sw */
aasink->height, /* sh */
0, 0, aa_imgwidth (aasink->context), aa_imgheight (aasink->context));
aa_flush (aasink->context);
aa_getevent (aasink->context, FALSE);
+ gst_buffer_unmap (buffer, data, -1);
return GST_FLOW_OK;
}
aasink = GST_AASINK (object);
switch (prop_id) {
- case ARG_WIDTH:
+ case PROP_WIDTH:
aasink->ascii_surf.width = g_value_get_int (value);
break;
- case ARG_HEIGHT:
+ case PROP_HEIGHT:
aasink->ascii_surf.height = g_value_get_int (value);
break;
- case ARG_DRIVER:{
+ case PROP_DRIVER:{
aasink->aa_driver = g_value_get_enum (value);
break;
}
- case ARG_DITHER:{
+ case PROP_DITHER:{
aasink->ascii_parms.dither = g_value_get_enum (value);
break;
}
- case ARG_BRIGHTNESS:{
+ case PROP_BRIGHTNESS:{
aasink->ascii_parms.bright = g_value_get_int (value);
break;
}
- case ARG_CONTRAST:{
+ case PROP_CONTRAST:{
aasink->ascii_parms.contrast = g_value_get_int (value);
break;
}
- case ARG_GAMMA:{
+ case PROP_GAMMA:{
aasink->ascii_parms.gamma = g_value_get_float (value);
break;
}
- case ARG_INVERSION:{
+ case PROP_INVERSION:{
aasink->ascii_parms.inversion = g_value_get_boolean (value);
break;
}
- case ARG_RANDOMVAL:{
+ case PROP_RANDOMVAL:{
aasink->ascii_parms.randomval = g_value_get_int (value);
break;
}
aasink = GST_AASINK (object);
switch (prop_id) {
- case ARG_WIDTH:{
+ case PROP_WIDTH:{
g_value_set_int (value, aasink->ascii_surf.width);
break;
}
- case ARG_HEIGHT:{
+ case PROP_HEIGHT:{
g_value_set_int (value, aasink->ascii_surf.height);
break;
}
- case ARG_DRIVER:{
+ case PROP_DRIVER:{
g_value_set_enum (value, aasink->aa_driver);
break;
}
- case ARG_DITHER:{
+ case PROP_DITHER:{
g_value_set_enum (value, aasink->ascii_parms.dither);
break;
}
- case ARG_BRIGHTNESS:{
+ case PROP_BRIGHTNESS:{
g_value_set_int (value, aasink->ascii_parms.bright);
break;
}
- case ARG_CONTRAST:{
+ case PROP_CONTRAST:{
g_value_set_int (value, aasink->ascii_parms.contrast);
break;
}
- case ARG_GAMMA:{
+ case PROP_GAMMA:{
g_value_set_float (value, aasink->ascii_parms.gamma);
break;
}
- case ARG_INVERSION:{
+ case PROP_INVERSION:{
g_value_set_boolean (value, aasink->ascii_parms.inversion);
break;
}
- case ARG_RANDOMVAL:{
+ case PROP_RANDOMVAL:{
g_value_set_int (value, aasink->ascii_parms.randomval);
break;
}
- case ARG_FRAMES_DISPLAYED:{
+ case PROP_FRAMES_DISPLAYED:{
g_value_set_int (value, aasink->frames_displayed);
break;
}
- case ARG_FRAME_TIME:{
+ case PROP_FRAME_TIME:{
g_value_set_int (value, aasink->frame_time / 1000000);
break;
}
+++ /dev/null
-plugin_LTLIBRARIES = libgstesd.la
-
-libgstesd_la_SOURCES = esdsink.c gstesd.c
-libgstesd_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(ESD_CFLAGS)
-libgstesd_la_LIBADD = \
- $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) \
- $(GST_BASE_LIBS) \
- $(GST_LIBS) \
- $(ESD_LIBS)
-libgstesd_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
-libgstesd_la_LIBTOOLFLAGS = --tag=disable-static
-
-noinst_HEADERS = esdsink.h esdmon.h
-EXTRA_DIST =
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2001,2002> Richard Boulton <richard-gst@tartarus.org>
- *
- * Based on example.c:
- * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/**
- * SECTION:element-esdmon
- * @see_also: #GstAlsaSrc, #GstAutoAudioSrc
- *
- * This element records sound from an already-running Enlightened Sound Daemon
- * (ESound Daemon, esd). Note that a sound daemon will never be auto-spawned
- * through this element (regardless of the system configuration), since this
- * is actively prevented by the element. If you must use esd, you need to
- * make sure it is started automatically with your session or otherwise.
- *
- * TODO: insert some comments about how sucky esd is and that all the cool
- * kids use pulseaudio or whatever these days.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch esdmon ! audioconvert ! waveenc ! filesink location=record.wav
- * ]| Record from audioinput
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-#include "esdmon.h"
-#include <esd.h>
-#include <unistd.h>
-
-/* Signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
-};
-
-enum
-{
- ARG_0,
- ARG_DEPTH,
- ARG_BYTESPERREAD,
- ARG_CUROFFSET,
- ARG_CHANNELS,
- ARG_RATE,
- ARG_HOST
-};
-
-static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) " G_STRINGIFY (G_BYTE_ORDER) ", "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = [ 8000, 96000 ], "
- "channels = [ 1, 2 ]; "
- "audio/x-raw-int, "
- "signed = (boolean) FALSE, "
- "width = (int) 8, "
- "depth = (int) 8, " "rate = [ 8000, 96000 ], " "channels = [ 1, 2 ]")
- );
-
-static void gst_esdmon_base_init (gpointer g_class);
-static void gst_esdmon_class_init (gpointer g_class, gpointer class_data);
-static void gst_esdmon_init (GTypeInstance * instance, gpointer g_class);
-
-static gboolean gst_esdmon_open_audio (GstEsdmon * src);
-static void gst_esdmon_close_audio (GstEsdmon * src);
-static GstStateChangeReturn gst_esdmon_change_state (GstElement * element,
- GstStateChange transition);
-static gboolean gst_esdmon_sync_parms (GstEsdmon * esdmon);
-
-static GstData *gst_esdmon_get (GstPad * pad);
-
-static void gst_esdmon_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_esdmon_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-#define GST_TYPE_ESDMON_DEPTHS (gst_esdmon_depths_get_type())
-static GType
-gst_esdmon_depths_get_type (void)
-{
- static GType esdmon_depths_type = 0;
- static const GEnumValue esdmon_depths[] = {
- {8, "8 Bits", "8"},
- {16, "16 Bits", "16"},
- {0, NULL, NULL},
- };
-
- if (!esdmon_depths_type) {
- esdmon_depths_type =
- g_enum_register_static ("GstEsdmonDepths", esdmon_depths);
- }
- return esdmon_depths_type;
-}
-
-#define GST_TYPE_ESDMON_CHANNELS (gst_esdmon_channels_get_type())
-static GType
-gst_esdmon_channels_get_type (void)
-{
- static GType esdmon_channels_type = 0;
- static const GEnumValue esdmon_channels[] = {
- {1, "Mono", "mono"},
- {2, "Stereo", "stereo"},
- {0, NULL, NULL},
- };
-
- if (!esdmon_channels_type) {
- esdmon_channels_type =
- g_enum_register_static ("GstEsdmonChannels", esdmon_channels);
- }
- return esdmon_channels_type;
-}
-
-
-static GstElementClass *parent_class = NULL;
-
-/*static guint gst_esdmon_signals[LAST_SIGNAL] = { 0 }; */
-
-GType
-gst_esdmon_get_type (void)
-{
- static GType esdmon_type = 0;
-
- if (!esdmon_type) {
- static const GTypeInfo esdmon_info = {
- sizeof (GstEsdmonClass),
- gst_esdmon_base_init,
- NULL,
- gst_esdmon_class_init,
- NULL,
- NULL,
- sizeof (GstEsdmon),
- 0,
- gst_esdmon_init,
- };
-
- esdmon_type =
- g_type_register_static (GST_TYPE_ELEMENT, "GstEsdmon", &esdmon_info, 0);
- }
- return esdmon_type;
-}
-
-static void
-gst_esdmon_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_set_details_simple (element_class, "Esound audio monitor",
- "Source/Audio",
- "Monitors audio from an esound server",
- "Richard Boulton <richard-gst@tartarus.org>");
-}
-
-static void
-gst_esdmon_class_init (gpointer g_class, gpointer class_data)
-{
- GObjectClass *gobject_class = G_OBJECT_CLASS (g_class);
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- parent_class = g_type_class_peek_parent (g_class);
-
- /* FIXME: add long property descriptions */
- g_object_class_install_property (gobject_class, ARG_BYTESPERREAD,
- g_param_spec_ulong ("bytes-per-read", "bytes per read", "bytes per read",
- 0, G_MAXULONG, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, ARG_CUROFFSET,
- g_param_spec_ulong ("curoffset", "curoffset", "curoffset",
- 0, G_MAXULONG, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, ARG_DEPTH,
- g_param_spec_enum ("depth", "depth", "depth", GST_TYPE_ESDMON_DEPTHS,
- 16, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, ARG_CHANNELS,
- g_param_spec_enum ("channels", "channels", "channels",
- GST_TYPE_ESDMON_CHANNELS, 2,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, ARG_RATE,
- g_param_spec_int ("frequency", "frequency", "frequency",
- G_MININT, G_MAXINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, ARG_HOST,
- g_param_spec_string ("host", "host", "host", NULL,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
- gobject_class->set_property = gst_esdmon_set_property;
- gobject_class->get_property = gst_esdmon_get_property;
-
- gstelement_class->change_state = gst_esdmon_change_state;
-}
-
-static void
-gst_esdmon_init (GTypeInstance * instance, gpointer g_class)
-{
- GstEsdmon *esdmon = GST_ESDMON (instance);
-
- esdmon->srcpad =
- gst_pad_new_from_template (gst_element_class_get_pad_template
- (GST_ELEMENT_GET_CLASS (esdmon), "src"), "src");
- gst_pad_set_get_function (esdmon->srcpad, gst_esdmon_get);
- gst_pad_use_explicit_caps (esdmon->srcpad);
- gst_element_add_pad (GST_ELEMENT (esdmon), esdmon->srcpad);
-
- esdmon->fd = -1;
- /* FIXME: get default from somewhere better than just putting them inline. */
- esdmon->depth = 16;
- esdmon->channels = 2;
- esdmon->frequency = 44100;
- esdmon->host = NULL;
- esdmon->bytes_per_read = 4096;
- esdmon->curoffset = 0;
- esdmon->basetime = 0;
- esdmon->samples_since_basetime = 0;
-}
-
-static gboolean
-gst_esdmon_sync_parms (GstEsdmon * esdmon)
-{
- g_return_val_if_fail (esdmon != NULL, FALSE);
- g_return_val_if_fail (GST_IS_ESDMON (esdmon), FALSE);
-
- if (esdmon->fd == -1)
- return TRUE;
-
- /* Need to set fd to use new parameters: only way to do this is to reopen. */
- gst_esdmon_close_audio (esdmon);
- return gst_esdmon_open_audio (esdmon);
-}
-
-static GstData *
-gst_esdmon_get (GstPad * pad)
-{
- GstEsdmon *esdmon;
- GstBuffer *buf;
- glong readbytes;
- glong readsamples;
-
- g_return_val_if_fail (pad != NULL, NULL);
- esdmon = GST_ESDMON (gst_pad_get_parent (pad));
-
- GST_DEBUG ("attempting to read something from esdmon");
-
- buf = gst_buffer_new ();
- g_return_val_if_fail (buf, NULL);
-
- GST_BUFFER_DATA (buf) = (gpointer) g_malloc (esdmon->bytes_per_read);
-
- readbytes = read (esdmon->fd, GST_BUFFER_DATA (buf), esdmon->bytes_per_read);
-
- if (readbytes == 0) {
- gst_element_set_eos (GST_ELEMENT (esdmon));
- return NULL;
- }
- if (!GST_PAD_CAPS (pad)) {
- GstCaps *caps = gst_caps_new_simple ("audio/x-raw-int",
- "endianness", G_TYPE_INT, G_BYTE_ORDER,
- "signed", G_TYPE_BOOLEAN, esdmon->depth == 8 ? FALSE : TRUE,
- "width", G_TYPE_INT, esdmon->depth,
- "depth", G_TYPE_INT, esdmon->depth,
- "rate", G_TYPE_INT, esdmon->frequency,
- "channels", G_TYPE_INT, esdmon->channels,
- NULL);
-
- /* set caps on src pad */
- if (gst_pad_set_explicit_caps (esdmon->srcpad, caps) <= 0) {
- GST_ELEMENT_ERROR (esdmon, CORE, NEGOTIATION, (NULL), (NULL));
- gst_caps_free (caps);
- return NULL;
- }
- gst_caps_free (caps);
- }
-
- GST_BUFFER_SIZE (buf) = readbytes;
- GST_BUFFER_OFFSET (buf) = esdmon->curoffset;
- GST_BUFFER_TIMESTAMP (buf) = esdmon->basetime +
- esdmon->samples_since_basetime * GST_SECOND / esdmon->frequency;
-
- esdmon->curoffset += readbytes;
- readsamples = readbytes / esdmon->channels;
- if (esdmon->depth == 16)
- readsamples /= 2;
- esdmon->samples_since_basetime += readsamples;
-
- GST_DEBUG ("pushed buffer from esdmon of %ld bytes, timestamp %"
- G_GINT64_FORMAT, readbytes, GST_BUFFER_TIMESTAMP (buf));
- gst_object_unref (esdmon);
- return GST_DATA (buf);
-}
-
-static void
-gst_esdmon_set_property (GObject * object, guint prop_id, const GValue * value,
- GParamSpec * pspec)
-{
- GstEsdmon *esdmon;
-
- g_return_if_fail (GST_IS_ESDMON (object));
- esdmon = GST_ESDMON (object);
-
- switch (prop_id) {
- case ARG_BYTESPERREAD:
- esdmon->bytes_per_read = g_value_get_ulong (value);
- /* No need to sync params - will just happen on next read. */
- break;
- case ARG_DEPTH:
- esdmon->depth = g_value_get_enum (value);
- gst_esdmon_sync_parms (esdmon);
- break;
- case ARG_CHANNELS:
- esdmon->channels = g_value_get_enum (value);
- gst_esdmon_sync_parms (esdmon);
- break;
- case ARG_RATE:
- /* Preserve the timestamps */
- esdmon->basetime =
- esdmon->samples_since_basetime * GST_SECOND / esdmon->frequency;
- esdmon->samples_since_basetime = 0;
-
- /* Set the new frequency */
- esdmon->frequency = g_value_get_int (value);
- gst_esdmon_sync_parms (esdmon);
- break;
- case ARG_HOST:
- if (esdmon->host != NULL)
- g_free (esdmon->host);
- if (g_value_get_string (value) == NULL)
- esdmon->host = NULL;
- else
- esdmon->host = g_strdup (g_value_get_string (value));
- break;
- default:
- break;
- }
-}
-
-static void
-gst_esdmon_get_property (GObject * object, guint prop_id, GValue * value,
- GParamSpec * pspec)
-{
- GstEsdmon *esdmon;
-
- g_return_if_fail (GST_IS_ESDMON (object));
- esdmon = GST_ESDMON (object);
-
- switch (prop_id) {
- case ARG_BYTESPERREAD:
- g_value_set_ulong (value, esdmon->bytes_per_read);
- break;
- case ARG_CUROFFSET:
- g_value_set_ulong (value, esdmon->curoffset);
- break;
- case ARG_DEPTH:
- g_value_set_enum (value, esdmon->depth);
- break;
- case ARG_CHANNELS:
- g_value_set_enum (value, esdmon->channels);
- break;
- case ARG_RATE:
- g_value_set_int (value, esdmon->frequency);
- break;
- case ARG_HOST:
- g_value_set_string (value, esdmon->host);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static gboolean
-gst_esdmon_open_audio (GstEsdmon * src)
-{
- /* Name used by esound for this connection. */
- const char connname[] = "GStreamer";
-
- /* Bitmap describing audio format. */
- esd_format_t esdformat = ESD_STREAM | ESD_PLAY;
-
- g_return_val_if_fail (src->fd == -1, FALSE);
-
- if (src->depth == 16)
- esdformat |= ESD_BITS16;
- else if (src->depth == 8)
- esdformat |= ESD_BITS8;
- else {
- GST_DEBUG ("esdmon: invalid bit depth (%d)", src->depth);
- return FALSE;
- }
-
- if (src->channels == 2)
- esdformat |= ESD_STEREO;
- else if (src->channels == 1)
- esdformat |= ESD_MONO;
- else {
- GST_DEBUG ("esdmon: invalid number of channels (%d)", src->channels);
- return FALSE;
- }
-
- GST_DEBUG ("esdmon: attempting to open connection to esound server");
- src->fd = esd_monitor_stream (esdformat, src->frequency, src->host, connname);
- if (src->fd < 0) {
- GST_DEBUG ("esdmon: can't open connection to esound server");
- return FALSE;
- }
-
- GST_OBJECT_FLAG_SET (src, GST_ESDMON_OPEN);
-
- return TRUE;
-}
-
-static void
-gst_esdmon_close_audio (GstEsdmon * src)
-{
- if (src->fd < 0)
- return;
-
- close (src->fd);
- src->fd = -1;
-
- GST_OBJECT_FLAG_UNSET (src, GST_ESDMON_OPEN);
-
- GST_DEBUG ("esdmon: closed sound device");
-}
-
-static GstStateChangeReturn
-gst_esdmon_change_state (GstElement * element, GstStateChange transition)
-{
- g_return_val_if_fail (GST_IS_ESDMON (element), FALSE);
-
- /* if going down into NULL state, close the fd if it's open */
- if (GST_STATE_PENDING (element) == GST_STATE_NULL) {
- if (GST_OBJECT_FLAG_IS_SET (element, GST_ESDMON_OPEN))
- gst_esdmon_close_audio (GST_ESDMON (element));
- /* otherwise (READY or higher) we need to open the fd */
- } else {
- if (!GST_OBJECT_FLAG_IS_SET (element, GST_ESDMON_OPEN)) {
- if (!gst_esdmon_open_audio (GST_ESDMON (element)))
- return GST_STATE_CHANGE_FAILURE;
- }
- }
-
- if (GST_ELEMENT_CLASS (parent_class)->change_state)
- return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
- return GST_STATE_CHANGE_SUCCESS;
-}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2001,2002> Richard Boulton <richard-gst@tartarus.org>
- *
- * Based on example.c:
- * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_ESDMON_H__
-#define __GST_ESDMON_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_ESDMON \
- (gst_esdmon_get_type())
-#define GST_ESDMON(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ESDMON,GstEsdmon))
-#define GST_ESDMON_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ESDMON,GstEsdmonClass))
-#define GST_IS_ESDMON(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ESDMON))
-#define GST_IS_ESDMON_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ESDMON))
-
-typedef enum {
- GST_ESDMON_OPEN = (GST_ELEMENT_FLAG_LAST << 0),
- GST_ESDMON_FLAG_LAST = (GST_ELEMENT_FLAG_LAST << 2)
-} GstEsdSrcFlags;
-
-typedef struct _GstEsdmon GstEsdmon;
-typedef struct _GstEsdmonClass GstEsdmonClass;
-
-struct _GstEsdmon {
- GstElement element;
-
- GstPad *srcpad;
-
- gchar* host;
-
- int fd;
-
- gint depth;
- gint channels;
- gint frequency;
-
- guint64 basetime;
- guint64 samples_since_basetime;
- guint64 curoffset;
- guint64 bytes_per_read;
-};
-
-struct _GstEsdmonClass {
- GstElementClass parent_class;
-};
-
-GType gst_esdmon_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_ESDMON_H__ */
-
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2005> Arwed v. Merkatz <v.merkatz@gmx.net>
- *
- * Roughly based on the gstreamer 0.8 esdsink plugin:
- * Copyright (C) <2001> Richard Boulton <richard-gst@tartarus.org>
- *
- * esdsink.c: an EsounD audio sink
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-/**
- * SECTION:element-esdsink
- * @see_also: #GstAlsaSink, #GstAutoAudioSink
- *
- * This element outputs sound to an already-running Enlightened Sound Daemon
- * (ESound Daemon, esd). Note that a sound daemon will never be auto-spawned
- * through this element (regardless of the system configuration), since this
- * is actively prevented by the element. If you must use esd, you need to
- * make sure it is started automatically with your session or otherwise.
- *
- * TODO: insert some comments about how sucky esd is and that all the cool
- * kids use pulseaudio or whatever these days.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch -v filesrc location=foo.ogg ! decodebin ! audioconvert ! audioresample ! esdsink
- * ]| play an Ogg/Vorbis audio file via esd
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include "esdsink.h"
-#include <esd.h>
-#include <unistd.h>
-#include <errno.h>
-
-#include <gst/gst-i18n-plugin.h>
-
-/* wtay: from my esd.h (debian unstable libesd0-dev 0.2.36-3) */
-#ifndef ESD_MAX_WRITE_SIZE
-#define ESD_MAX_WRITE_SIZE (21 * 4096)
-#endif
-
-GST_DEBUG_CATEGORY_EXTERN (esd_debug);
-#define GST_CAT_DEFAULT esd_debug
-
-enum
-{
- PROP_0,
- PROP_HOST
-};
-
-static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) BYTE_ORDER, "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 2 ]; "
- "audio/x-raw-int, "
- "signed = (boolean) { true, false }, "
- "width = (int) 8, "
- "depth = (int) 8, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]")
- );
-
-static void gst_esdsink_finalize (GObject * object);
-
-static GstCaps *gst_esdsink_getcaps (GstBaseSink * bsink);
-
-static gboolean gst_esdsink_open (GstAudioSink * asink);
-static gboolean gst_esdsink_close (GstAudioSink * asink);
-static gboolean gst_esdsink_prepare (GstAudioSink * asink,
- GstRingBufferSpec * spec);
-static gboolean gst_esdsink_unprepare (GstAudioSink * asink);
-static guint gst_esdsink_write (GstAudioSink * asink, gpointer data,
- guint length);
-static guint gst_esdsink_delay (GstAudioSink * asink);
-static void gst_esdsink_reset (GstAudioSink * asink);
-
-static void gst_esdsink_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_esdsink_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-GST_BOILERPLATE (GstEsdSink, gst_esdsink, GstAudioSink, GST_TYPE_AUDIO_SINK);
-
-static void
-gst_esdsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_factory));
- gst_element_class_set_details_simple (element_class, "Esound audio sink",
- "Sink/Audio",
- "Plays audio to an esound server",
- "Arwed von Merkatz <v.merkatz@gmx.net>");
-}
-
-static void
-gst_esdsink_class_init (GstEsdSinkClass * klass)
-{
- GObjectClass *gobject_class;
- GstBaseSinkClass *gstbasesink_class;
- GstAudioSinkClass *gstaudiosink_class;
-
- gobject_class = (GObjectClass *) klass;
- gstbasesink_class = (GstBaseSinkClass *) klass;
- gstaudiosink_class = (GstAudioSinkClass *) klass;
-
- parent_class = g_type_class_peek_parent (klass);
-
- gobject_class->finalize = gst_esdsink_finalize;
-
- gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_esdsink_getcaps);
-
- gstaudiosink_class->open = GST_DEBUG_FUNCPTR (gst_esdsink_open);
- gstaudiosink_class->close = GST_DEBUG_FUNCPTR (gst_esdsink_close);
- gstaudiosink_class->prepare = GST_DEBUG_FUNCPTR (gst_esdsink_prepare);
- gstaudiosink_class->unprepare = GST_DEBUG_FUNCPTR (gst_esdsink_unprepare);
- gstaudiosink_class->write = GST_DEBUG_FUNCPTR (gst_esdsink_write);
- gstaudiosink_class->delay = GST_DEBUG_FUNCPTR (gst_esdsink_delay);
- gstaudiosink_class->reset = GST_DEBUG_FUNCPTR (gst_esdsink_reset);
-
- gobject_class->set_property = gst_esdsink_set_property;
- gobject_class->get_property = gst_esdsink_get_property;
-
- /* default value is filled in the _init method */
- g_object_class_install_property (gobject_class, PROP_HOST,
- g_param_spec_string ("host", "Host",
- "The host running the esound daemon", NULL,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-}
-
-static void
-gst_esdsink_init (GstEsdSink * esdsink, GstEsdSinkClass * klass)
-{
- esdsink->fd = -1;
- esdsink->ctrl_fd = -1;
- esdsink->host = g_strdup (g_getenv ("ESPEAKER"));
-}
-
-static void
-gst_esdsink_finalize (GObject * object)
-{
- GstEsdSink *esdsink = GST_ESDSINK (object);
-
- gst_caps_replace (&esdsink->cur_caps, NULL);
- g_free (esdsink->host);
-
- G_OBJECT_CLASS (parent_class)->finalize (object);
-}
-
-static GstCaps *
-gst_esdsink_getcaps (GstBaseSink * bsink)
-{
- GstEsdSink *esdsink;
-
- esdsink = GST_ESDSINK (bsink);
-
- /* no fd, we're done with the template caps */
- if (esdsink->ctrl_fd < 0 || esdsink->cur_caps == NULL) {
- GST_LOG_OBJECT (esdsink, "getcaps called, returning template caps");
- return NULL;
- }
-
- GST_LOG_OBJECT (esdsink, "returning %" GST_PTR_FORMAT, esdsink->cur_caps);
-
- return gst_caps_ref (esdsink->cur_caps);
-}
-
-static gboolean
-gst_esdsink_open (GstAudioSink * asink)
-{
- esd_server_info_t *server_info;
- GstPadTemplate *pad_template;
- GstEsdSink *esdsink;
- gchar *saved_env;
- gint i;
-
- esdsink = GST_ESDSINK (asink);
-
- GST_DEBUG_OBJECT (esdsink, "open");
-
- /* ensure libesd doesn't auto-spawn a sound daemon if none is running yet */
- saved_env = g_strdup (g_getenv ("ESD_NO_SPAWN"));
- g_setenv ("ESD_NO_SPAWN", "1", TRUE);
-
- /* now try to connect to any existing/running sound daemons */
- esdsink->ctrl_fd = esd_open_sound (esdsink->host);
-
- /* and restore the previous state */
- if (saved_env != NULL) {
- g_setenv ("ESD_NO_SPAWN", saved_env, TRUE);
- } else {
- g_unsetenv ("ESD_NO_SPAWN");
- }
- g_free (saved_env);
-
- if (esdsink->ctrl_fd < 0)
- goto couldnt_connect;
-
- /* get server info */
- server_info = esd_get_server_info (esdsink->ctrl_fd);
- if (!server_info)
- goto no_server_info;
-
- GST_INFO_OBJECT (esdsink, "got server info rate: %i", server_info->rate);
-
- pad_template = gst_static_pad_template_get (&sink_factory);
- esdsink->cur_caps = gst_caps_copy (gst_pad_template_get_caps (pad_template));
- gst_object_unref (pad_template);
-
- for (i = 0; i < esdsink->cur_caps->structs->len; i++) {
- GstStructure *s;
-
- s = gst_caps_get_structure (esdsink->cur_caps, i);
- gst_structure_set (s, "rate", G_TYPE_INT, server_info->rate, NULL);
- }
-
- esd_free_server_info (server_info);
-
- GST_INFO_OBJECT (esdsink, "server caps: %" GST_PTR_FORMAT, esdsink->cur_caps);
-
- return TRUE;
-
- /* ERRORS */
-couldnt_connect:
- {
- GST_ELEMENT_ERROR (esdsink, RESOURCE, OPEN_WRITE,
- (_("Could not establish connection to sound server")),
- ("can't open connection to esound server"));
- return FALSE;
- }
-no_server_info:
- {
- GST_ELEMENT_ERROR (esdsink, RESOURCE, OPEN_WRITE,
- (_("Failed to query sound server capabilities")),
- ("couldn't get server info!"));
- return FALSE;
- }
-}
-
-static gboolean
-gst_esdsink_close (GstAudioSink * asink)
-{
- GstEsdSink *esdsink = GST_ESDSINK (asink);
-
- GST_DEBUG_OBJECT (esdsink, "close");
-
- gst_caps_replace (&esdsink->cur_caps, NULL);
- esd_close (esdsink->ctrl_fd);
- esdsink->ctrl_fd = -1;
-
- return TRUE;
-}
-
-static gboolean
-gst_esdsink_prepare (GstAudioSink * asink, GstRingBufferSpec * spec)
-{
- GstEsdSink *esdsink = GST_ESDSINK (asink);
- esd_format_t esdformat;
-
- /* Name used by esound for this connection. */
- const char connname[] = "GStreamer";
-
- GST_DEBUG_OBJECT (esdsink, "prepare");
-
- /* Bitmap describing audio format. */
- esdformat = ESD_STREAM | ESD_PLAY;
-
- switch (spec->depth) {
- case 8:
- esdformat |= ESD_BITS8;
- break;
- case 16:
- esdformat |= ESD_BITS16;
- break;
- default:
- goto unsupported_depth;
- }
-
- switch (spec->channels) {
- case 1:
- esdformat |= ESD_MONO;
- break;
- case 2:
- esdformat |= ESD_STEREO;
- break;
- default:
- goto unsupported_channels;
- }
-
- GST_INFO_OBJECT (esdsink,
- "attempting to open data connection to esound server");
-
- esdsink->fd =
- esd_play_stream (esdformat, spec->rate, esdsink->host, connname);
-
- if ((esdsink->fd < 0) || (esdsink->ctrl_fd < 0))
- goto cannot_open;
-
- esdsink->rate = spec->rate;
-
- spec->segsize = ESD_BUF_SIZE;
- spec->segtotal = (ESD_MAX_WRITE_SIZE / spec->segsize);
-
- /* FIXME: this is wrong for signed ints (and the
- * audioringbuffers should do it for us anyway) */
- spec->silence_sample[0] = 0;
- spec->silence_sample[1] = 0;
- spec->silence_sample[2] = 0;
- spec->silence_sample[3] = 0;
-
- GST_INFO_OBJECT (esdsink, "successfully opened connection to esound server");
-
- return TRUE;
-
- /* ERRORS */
-unsupported_depth:
- {
- GST_ELEMENT_ERROR (esdsink, STREAM, WRONG_TYPE, (NULL),
- ("can't handle sample depth of %d, only 8 or 16 supported",
- spec->depth));
- return FALSE;
- }
-unsupported_channels:
- {
- GST_ELEMENT_ERROR (esdsink, STREAM, WRONG_TYPE, (NULL),
- ("can't handle %d channels, only 1 or 2 supported", spec->channels));
- return FALSE;
- }
-cannot_open:
- {
- GST_ELEMENT_ERROR (esdsink, RESOURCE, OPEN_WRITE,
- (_("Could not establish connection to sound server")),
- ("can't open connection to esound server"));
- return FALSE;
- }
-}
-
-static gboolean
-gst_esdsink_unprepare (GstAudioSink * asink)
-{
- GstEsdSink *esdsink = GST_ESDSINK (asink);
-
- if ((esdsink->fd < 0) && (esdsink->ctrl_fd < 0))
- return TRUE;
-
- close (esdsink->fd);
- esdsink->fd = -1;
-
- GST_INFO_OBJECT (esdsink, "closed sound device");
-
- return TRUE;
-}
-
-
-static guint
-gst_esdsink_write (GstAudioSink * asink, gpointer data, guint length)
-{
- GstEsdSink *esdsink = GST_ESDSINK (asink);
- gint to_write = 0;
-
- to_write = length;
-
- while (to_write > 0) {
- int done;
-
- done = write (esdsink->fd, data, to_write);
-
- if (done < 0)
- goto write_error;
-
- to_write -= done;
- data = (char *) data + done;
- }
- return length;
-
- /* ERRORS */
-write_error:
- {
- GST_ELEMENT_ERROR (esdsink, RESOURCE, WRITE,
- ("Failed to write data to the esound daemon"), GST_ERROR_SYSTEM);
- return -1;
- }
-}
-
-static guint
-gst_esdsink_delay (GstAudioSink * asink)
-{
- GstEsdSink *esdsink = GST_ESDSINK (asink);
- guint latency;
-
- latency = esd_get_latency (esdsink->ctrl_fd);
-
- if (latency == (guint) - 1) {
- GST_WARNING_OBJECT (asink, "couldn't get latency");
- return 0;
- }
-
- /* latency is measured in samples at a rate of 44100, this
- * cannot overflow. */
- latency = latency * G_GINT64_CONSTANT (44100) / esdsink->rate;
-
- GST_DEBUG_OBJECT (asink, "got latency: %u", latency);
-
- return latency;
-}
-
-static void
-gst_esdsink_reset (GstAudioSink * asink)
-{
- GST_DEBUG_OBJECT (asink, "reset called");
-}
-
-static void
-gst_esdsink_set_property (GObject * object, guint prop_id, const GValue * value,
- GParamSpec * pspec)
-{
- GstEsdSink *esdsink = GST_ESDSINK (object);
-
- switch (prop_id) {
- case PROP_HOST:
- g_free (esdsink->host);
- esdsink->host = g_value_dup_string (value);
- break;
- default:
- break;
- }
-}
-
-static void
-gst_esdsink_get_property (GObject * object, guint prop_id, GValue * value,
- GParamSpec * pspec)
-{
- GstEsdSink *esdsink = GST_ESDSINK (object);
-
- switch (prop_id) {
- case PROP_HOST:
- g_value_set_string (value, esdsink->host);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2005> Arwed v. Merkatz <v.merkatz@gmx.net>
- *
- * esdsink.h: an EsounD audio sink
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-
-#ifndef __GST_ESDSINK_H__
-#define __GST_ESDSINK_H__
-
-#include <gst/gst.h>
-#include <gst/audio/gstaudiosink.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_ESDSINK \
- (gst_esdsink_get_type())
-#define GST_ESDSINK(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ESDSINK,GstEsdSink))
-#define GST_ESDSINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ESDSINK,GstEsdSinkClass))
-#define GST_IS_ESDSINK(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ESDSINK))
-#define GST_IS_ESDSINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ESDSINK))
-
-typedef struct _GstEsdSink GstEsdSink;
-typedef struct _GstEsdSinkClass GstEsdSinkClass;
-
-struct _GstEsdSink {
- GstAudioSink sink;
-
- int fd;
- int ctrl_fd;
- gchar *host;
-
- guint rate;
- GstCaps *cur_caps;
-};
-
-struct _GstEsdSinkClass {
- GstAudioSinkClass parent_class;
-};
-
-GType gst_esdsink_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_ESDSINK_H__ */
+++ /dev/null
-/* GStreamer
- * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
- * Copyright (C) <2003> David A. Schleef <ds@schleef.org>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-#include "esdsink.h"
-#if 0
-#include "esdmon.h"
-#endif
-
-#include "gst/gst-i18n-plugin.h"
-
-GST_DEBUG_CATEGORY (esd_debug);
-
-static gboolean
-plugin_init (GstPlugin * plugin)
-{
- if (!gst_element_register (plugin, "esdsink", GST_RANK_MARGINAL,
- GST_TYPE_ESDSINK))
- return FALSE;
-
-#if 0
- if (!gst_element_register (plugin, "esdmon", GST_RANK_NONE, GST_TYPE_ESDMON))
- return FALSE;
-#endif
-
- GST_DEBUG_CATEGORY_INIT (esd_debug, "esd", 0, "ESounD elements");
-
-#ifdef ENABLE_NLS
- setlocale (LC_ALL, "");
- bindtextdomain (GETTEXT_PACKAGE, LOCALEDIR);
- bind_textdomain_codeset (GETTEXT_PACKAGE, "UTF-8");
-#endif /* ENABLE_NLS */
-
- return TRUE;
-}
-
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
- GST_VERSION_MINOR,
- "esdsink",
- "ESD Element Plugins",
- plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
static void gst_flac_dec_error_cb (const FLAC__StreamDecoder *
decoder, FLAC__StreamDecoderErrorStatus status, void *client_data);
-GST_BOILERPLATE (GstFlacDec, gst_flac_dec, GstElement, GST_TYPE_ELEMENT);
+#define gst_flac_dec_parent_class parent_class
+G_DEFINE_TYPE (GstFlacDec, gst_flac_dec, GST_TYPE_ELEMENT);
/* FIXME 0.11: Use width=32 for all depths and let audioconvert
* handle the conversions instead of doing it ourself.
);
static void
-gst_flac_dec_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&flac_dec_src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&flac_dec_sink_factory));
- gst_element_class_set_details_simple (element_class, "FLAC audio decoder",
- "Codec/Decoder/Audio",
- "Decodes FLAC lossless audio streams", "Wim Taymans <wim@fluendo.com>");
-
- GST_DEBUG_CATEGORY_INIT (flacdec_debug, "flacdec", 0, "flac decoder");
-}
-
-static void
gst_flac_dec_class_init (GstFlacDecClass * klass)
{
GstElementClass *gstelement_class;
gstelement_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (flacdec_debug, "flacdec", 0, "flac decoder");
+
gobject_class->finalize = gst_flac_dec_finalize;
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_flac_dec_change_state);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_dec_src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_dec_sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "FLAC audio decoder",
+ "Codec/Decoder/Audio",
+ "Decodes FLAC lossless audio streams", "Wim Taymans <wim@fluendo.com>");
}
static void
-gst_flac_dec_init (GstFlacDec * flacdec, GstFlacDecClass * klass)
+gst_flac_dec_init (GstFlacDec * flacdec)
{
flacdec->sinkpad =
gst_pad_new_from_static_template (&flac_dec_sink_factory, "sink");
while (offset >= MAX (SCANBLOCK_SIZE / 2, file_size / 2)) {
GstFlowReturn flow;
GstBuffer *buf = NULL;
- guint8 *data;
- guint size;
+ guint8 *data, *ptr;
+ gsize size, left;
/* divide by 2 = not very sophisticated way to deal with overlapping */
offset -= SCANBLOCK_SIZE / 2;
return;
}
- size = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+
+ ptr = data;
+ left = size;
- while (size > 16) {
- if (gst_flac_dec_scan_got_frame (flacdec, data, size, samples)) {
+ while (left > 16) {
+ if (gst_flac_dec_scan_got_frame (flacdec, ptr, left, samples)) {
GST_DEBUG_OBJECT (flacdec, "frame sync at offset %" G_GINT64_FORMAT,
- offset + GST_BUFFER_SIZE (buf) - size);
+ offset + size - left);
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return;
}
- ++data;
- --size;
+ ++ptr;
+ --left;
}
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
}
}
return FLAC__STREAM_DECODER_READ_STATUS_ABORT;
}
+ *bytes = gst_buffer_get_size (buf);
GST_DEBUG_OBJECT (flacdec, "Read %d bytes at %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (buf), flacdec->offset);
- memcpy (buffer, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
- *bytes = GST_BUFFER_SIZE (buf);
+ *bytes, flacdec->offset);
+
+ gst_buffer_extract (buf, 0, buffer, *bytes);
gst_buffer_unref (buf);
flacdec->offset += *bytes;
guint samples = frame->header.blocksize;
guint j, i;
GstClockTime next;
+ gpointer data;
+ gsize size;
GST_LOG_OBJECT (flacdec, "samples in frame header: %d", samples);
* downstream negotiation work on older basetransform */
ret = gst_pad_alloc_buffer_and_set_caps (flacdec->srcpad,
GST_BUFFER_OFFSET (flacdec->pending),
- GST_BUFFER_SIZE (flacdec->pending),
+ gst_buffer_get_size (flacdec->pending),
GST_BUFFER_CAPS (flacdec->pending), &outbuf);
if (ret == GST_FLOW_OK) {
gst_pad_push (flacdec->srcpad, flacdec->pending);
GST_BUFFER_DURATION (outbuf) = next - GST_BUFFER_TIMESTAMP (outbuf);
+ data = gst_buffer_map (outbuf, &size, NULL, GST_MAP_WRITE);
if (width == 8) {
- gint8 *outbuffer = (gint8 *) GST_BUFFER_DATA (outbuf);
+ gint8 *outbuffer = (gint8 *) data;
for (i = 0; i < samples; i++) {
for (j = 0; j < channels; j++) {
}
}
} else if (width == 16) {
- gint16 *outbuffer = (gint16 *) GST_BUFFER_DATA (outbuf);
+ gint16 *outbuffer = (gint16 *) data;
for (i = 0; i < samples; i++) {
for (j = 0; j < channels; j++) {
}
}
} else if (width == 32) {
- gint32 *outbuffer = (gint32 *) GST_BUFFER_DATA (outbuf);
+ gint32 *outbuffer = (gint32 *) data;
for (i = 0; i < samples; i++) {
for (j = 0; j < channels; j++) {
} else {
g_assert_not_reached ();
}
+ gst_buffer_unmap (outbuf, data, size);
if (!flacdec->seeking) {
GST_DEBUG_OBJECT (flacdec, "pushing %d samples at offset %" G_GINT64_FORMAT
if (flacdec->discont) {
GST_DEBUG_OBJECT (flacdec, "marking discont");
- outbuf = gst_buffer_make_metadata_writable (outbuf);
+ outbuf = gst_buffer_make_writable (outbuf);
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
flacdec->discont = FALSE;
}
GST_LOG_OBJECT (dec, "buffer with ts=%" GST_TIME_FORMAT ", end_offset=%"
G_GINT64_FORMAT ", size=%u", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
- GST_BUFFER_OFFSET_END (buf), GST_BUFFER_SIZE (buf));
+ GST_BUFFER_OFFSET_END (buf), gst_buffer_get_size (buf));
if (dec->init) {
GST_DEBUG_OBJECT (dec, "initializing decoder");
if (dec->framed) {
gint64 unused;
+ guint8 *data;
+ gsize size;
/* check if this is a flac audio frame (rather than a header or junk) */
- got_audio_frame = gst_flac_dec_scan_got_frame (dec, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf), &unused);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ got_audio_frame = gst_flac_dec_scan_got_frame (dec, data, size, &unused);
+ gst_buffer_unmap (buf, data, size);
/* oggdemux will set granulepos in OFFSET_END instead of timestamp */
if (G_LIKELY (got_audio_frame)) {
GST_DEBUG_CATEGORY_STATIC (flacenc_debug);
#define GST_CAT_DEFAULT flacenc_debug
-
-#define _do_init(type) \
- G_STMT_START{ \
- static const GInterfaceInfo tag_setter_info = { \
- NULL, \
- NULL, \
- NULL \
- }; \
- static const GInterfaceInfo preset_info = { \
- NULL, \
- NULL, \
- NULL \
- }; \
- g_type_add_interface_static (type, GST_TYPE_TAG_SETTER, \
- &tag_setter_info); \
- g_type_add_interface_static (type, GST_TYPE_PRESET, \
- &preset_info); \
- }G_STMT_END
-
-GST_BOILERPLATE_FULL (GstFlacEnc, gst_flac_enc, GstElement, GST_TYPE_ELEMENT,
- _do_init);
+#define gst_flac_enc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstFlacEnc, gst_flac_enc, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL));
static void gst_flac_enc_finalize (GObject * object);
}
static void
-gst_flac_enc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_factory));
-
- gst_element_class_set_details_simple (element_class, "FLAC audio encoder",
- "Codec/Encoder/Audio",
- "Encodes audio with the FLAC lossless audio encoder",
- "Wim Taymans <wim.taymans@chello.be>");
-
- GST_DEBUG_CATEGORY_INIT (flacenc_debug, "flacenc", 0,
- "Flac encoding element");
-}
-
-static void
gst_flac_enc_class_init (GstFlacEncClass * klass)
{
GObjectClass *gobject_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (flacenc_debug, "flacenc", 0,
+ "Flac encoding element");
+
gobject_class->set_property = gst_flac_enc_set_property;
gobject_class->get_property = gst_flac_enc_get_property;
gobject_class->finalize = gst_flac_enc_finalize;
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
gstelement_class->change_state = gst_flac_enc_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "FLAC audio encoder",
+ "Codec/Encoder/Audio",
+ "Encodes audio with the FLAC lossless audio encoder",
+ "Wim Taymans <wim.taymans@chello.be>");
}
static void
-gst_flac_enc_init (GstFlacEnc * flacenc, GstFlacEncClass * klass)
+gst_flac_enc_init (GstFlacEnc * flacenc)
{
flacenc->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_chain_function (flacenc->sinkpad,
GstStructure *structure;
GstTagImageType image_type = GST_TAG_IMAGE_TYPE_NONE;
gint i;
+ guint8 *data;
+ gsize size;
for (i = 0; i < n_images + n_preview_images; i++) {
if (i < n_images) {
else
image_type = image_type + 2;
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
FLAC__metadata_object_picture_set_data (flacenc->meta[entries],
- GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), TRUE);
+ data, size, TRUE);
+ gst_buffer_unmap (buffer, data, size);
+
/* FIXME: There's no way to set the picture type in libFLAC */
flacenc->meta[entries]->data.picture.type = image_type;
FLAC__metadata_object_picture_set_mime_type (flacenc->meta[entries],
"rate", G_TYPE_INT, enc->sample_rate, NULL);
for (l = enc->headers; l != NULL; l = l->next) {
- const guint8 *data;
- guint size;
+ GstBuffer *buf;
+ guint8 *data;
+ gsize size;
/* mark buffers so oggmux will ignore them if it already muxed the
* header buffers from the streamheaders field in the caps */
- l->data = gst_buffer_make_metadata_writable (GST_BUFFER (l->data));
- GST_BUFFER_FLAG_SET (GST_BUFFER (l->data), GST_BUFFER_FLAG_IN_CAPS);
+ l->data = gst_buffer_make_writable (GST_BUFFER_CAST (l->data));
- data = GST_BUFFER_DATA (GST_BUFFER_CAST (l->data));
- size = GST_BUFFER_SIZE (GST_BUFFER_CAST (l->data));
+ buf = GST_BUFFER_CAST (l->data);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
+
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/* find initial 4-byte marker which we need to skip later on */
if (size == 4 && memcmp (data, "fLaC", 4) == 0) {
- marker = GST_BUFFER_CAST (l->data);
+ marker = buf;
} else if (size > 1 && (data[0] & 0x7f) == HDR_TYPE_STREAMINFO) {
- streaminfo = GST_BUFFER_CAST (l->data);
+ streaminfo = buf;
} else if (size > 1 && (data[0] & 0x7f) == HDR_TYPE_VORBISCOMMENT) {
- vorbiscomment = GST_BUFFER_CAST (l->data);
+ vorbiscomment = buf;
}
+
+ gst_buffer_unmap (buf, data, size);
}
if (marker == NULL || streaminfo == NULL || vorbiscomment == NULL) {
{
GstBuffer *buf;
guint16 num;
+ guint8 *bdata;
+ gsize bsize, slen;
/* minus one for the marker that is merged with streaminfo here */
num = g_list_length (enc->headers) - 1;
- buf = gst_buffer_new_and_alloc (13 + GST_BUFFER_SIZE (streaminfo));
- GST_BUFFER_DATA (buf)[0] = 0x7f;
- memcpy (GST_BUFFER_DATA (buf) + 1, "FLAC", 4);
- GST_BUFFER_DATA (buf)[5] = 0x01; /* mapping version major */
- GST_BUFFER_DATA (buf)[6] = 0x00; /* mapping version minor */
- GST_BUFFER_DATA (buf)[7] = (num & 0xFF00) >> 8;
- GST_BUFFER_DATA (buf)[8] = (num & 0x00FF) >> 0;
- memcpy (GST_BUFFER_DATA (buf) + 9, "fLaC", 4);
- memcpy (GST_BUFFER_DATA (buf) + 13, GST_BUFFER_DATA (streaminfo),
- GST_BUFFER_SIZE (streaminfo));
+ slen = gst_buffer_get_size (streaminfo);
+ buf = gst_buffer_new_and_alloc (13 + slen);
+
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_WRITE);
+ bdata[0] = 0x7f;
+ memcpy (bdata + 1, "FLAC", 4);
+ bdata[5] = 0x01; /* mapping version major */
+ bdata[6] = 0x00; /* mapping version minor */
+ bdata[7] = (num & 0xFF00) >> 8;
+ bdata[8] = (num & 0x00FF) >> 0;
+ memcpy (bdata + 9, "fLaC", 4);
+ gst_buffer_extract (streaminfo, 0, bdata + 13, slen);
+ gst_buffer_unmap (buf, bdata, bsize);
+
notgst_value_array_append_buffer (&array, buf);
gst_buffer_unref (buf);
}
/* add other headers, if there are any */
for (l = enc->headers; l != NULL; l = l->next) {
- if (GST_BUFFER_CAST (l->data) != marker &&
- GST_BUFFER_CAST (l->data) != streaminfo &&
- GST_BUFFER_CAST (l->data) != vorbiscomment) {
- notgst_value_array_append_buffer (&array, GST_BUFFER_CAST (l->data));
+ GstBuffer *buf = GST_BUFFER_CAST (l->data);
+
+ if (buf != marker && buf != streaminfo && buf != vorbiscomment) {
+ notgst_value_array_append_buffer (&array, buf);
}
}
buf = GST_BUFFER (l->data);
gst_buffer_set_caps (buf, caps);
GST_LOG_OBJECT (enc, "Pushing header buffer, size %u bytes",
- GST_BUFFER_SIZE (buf));
+ (guint) gst_buffer_get_size (buf));
+#if 0
GST_MEMDUMP_OBJECT (enc, "header buffer", GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
+#endif
(void) gst_pad_push (enc->srcpad, buf);
l->data = NULL;
}
return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
outbuf = gst_buffer_new_and_alloc (bytes);
- memcpy (GST_BUFFER_DATA (outbuf), buffer, bytes);
+ gst_buffer_fill (outbuf, 0, buffer, bytes);
if (samples > 0 && flacenc->samples_written != (guint64) - 1) {
guint64 granulepos;
} else if (flacenc->got_headers && samples == 0) {
GST_DEBUG_OBJECT (flacenc, "Fixing up headers at pos=%" G_GUINT64_FORMAT
", size=%u", flacenc->offset, (guint) bytes);
+#if 0
GST_MEMDUMP_OBJECT (flacenc, "Presumed header fragment",
GST_BUFFER_DATA (outbuf), GST_BUFFER_SIZE (outbuf));
+#endif
} else {
GST_LOG ("Pushing buffer: ts=%" GST_TIME_FORMAT ", samples=%u, size=%u, "
"pos=%" G_GUINT64_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
{
GstFlacEnc *flacenc;
FLAC__int32 *data;
- gulong insize;
+ gsize bsize;
gint samples, width;
gulong i;
FLAC__bool res;
+ gpointer bdata;
flacenc = GST_FLAC_ENC (GST_PAD_PARENT (pad));
else
flacenc->next_ts = GST_CLOCK_TIME_NONE;
- insize = GST_BUFFER_SIZE (buffer);
- samples = insize / (width >> 3);
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READ);
+ samples = bsize / (width >> 3);
data = g_malloc (samples * sizeof (FLAC__int32));
if (width == 8) {
- gint8 *indata = (gint8 *) GST_BUFFER_DATA (buffer);
+ gint8 *indata = (gint8 *) bdata;
for (i = 0; i < samples; i++)
data[i] = (FLAC__int32) indata[i];
} else if (width == 16) {
- gint16 *indata = (gint16 *) GST_BUFFER_DATA (buffer);
+ gint16 *indata = (gint16 *) bdata;
for (i = 0; i < samples; i++)
data[i] = (FLAC__int32) indata[i];
} else if (width == 32) {
- gint32 *indata = (gint32 *) GST_BUFFER_DATA (buffer);
+ gint32 *indata = (gint32 *) bdata;
for (i = 0; i < samples; i++)
data[i] = (FLAC__int32) indata[i];
} else {
g_assert_not_reached ();
}
-
+ gst_buffer_unmap (buffer, bdata, bsize);
gst_buffer_unref (buffer);
res = FLAC__stream_encoder_process_interleaved (flacenc->encoder,
static gboolean gst_flac_tag_sink_setcaps (GstPad * pad, GstCaps * caps);
-static void
-gst_flac_tag_setup_interfaces (GType flac_tag_type)
-{
- static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };
-
- g_type_add_interface_static (flac_tag_type, GST_TYPE_TAG_SETTER,
- &tag_setter_info);
-}
+#define gst_flac_tag_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstFlacTag, gst_flac_tag, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
-GST_BOILERPLATE_FULL (GstFlacTag, gst_flac_tag, GstElement, GST_TYPE_ELEMENT,
- gst_flac_tag_setup_interfaces);
-
-static void
-gst_flac_tag_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "FLAC tagger",
- "Formatter/Metadata",
- "Rewrite tags in a FLAC file", "Christophe Fergeau <teuf@gnome.org>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&flac_tag_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&flac_tag_src_template));
-
- GST_DEBUG_CATEGORY_INIT (flactag_debug, "flactag", 0, "flac tag rewriter");
-}
static void
gst_flac_tag_class_init (GstFlacTagClass * klass)
GstElementClass *gstelement_class;
GObjectClass *gobject_class;
+ GST_DEBUG_CATEGORY_INIT (flactag_debug, "flactag", 0, "flac tag rewriter");
+
gstelement_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
-
gobject_class->dispose = gst_flac_tag_dispose;
gstelement_class->change_state = gst_flac_tag_change_state;
+
+ gst_element_class_set_details_simple (gstelement_class, "FLAC tagger",
+ "Formatter/Metadata",
+ "Rewrite tags in a FLAC file", "Christophe Fergeau <teuf@gnome.org>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_tag_sink_template));
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_tag_src_template));
}
static void
static void
-gst_flac_tag_init (GstFlacTag * tag, GstFlacTagClass * klass)
+gst_flac_tag_init (GstFlacTag * tag)
{
/* create the sink and src pads */
tag->sinkpad =
{
GstFlacTag *tag;
GstFlowReturn ret;
+ guint8 *data;
+ gsize size;
ret = GST_FLOW_OK;
tag = GST_FLAC_TAG (gst_pad_get_parent (pad));
id_buffer = gst_adapter_take_buffer (tag->adapter, FLAC_MAGIC_SIZE);
GST_DEBUG_OBJECT (tag, "looking for " FLAC_MAGIC " identifier");
- if (memcmp (GST_BUFFER_DATA (id_buffer), FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) {
+ if (gst_buffer_memcmp (id_buffer, 0, FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) {
GST_DEBUG_OBJECT (tag, "pushing " FLAC_MAGIC " identifier buffer");
gst_buffer_set_caps (id_buffer, GST_PAD_CAPS (tag->srcpad));
* of a metadata block
*/
if (tag->state == GST_FLAC_TAG_STATE_METADATA_BLOCKS) {
- guint size;
guint type;
gboolean is_last;
const guint8 *block_header;
if (gst_adapter_available (tag->adapter) < 4)
goto cleanup;
- block_header = gst_adapter_peek (tag->adapter, 4);
+ block_header = gst_adapter_map (tag->adapter, 4);
is_last = ((block_header[0] & 0x80) == 0x80);
type = block_header[0] & 0x7F;
size = (block_header[1] << 16)
| (block_header[2] << 8)
| block_header[3];
+ gst_adapter_unmap (tag->adapter, 0);
/* The 4 bytes long header isn't included in the metadata size */
tag->metadata_block_size = size + 4;
/* clear the is-last flag, as the last metadata block will
* be the vorbis comment block which we will build ourselves.
*/
- GST_BUFFER_DATA (metadata_buffer)[0] &= (~0x80);
+ data = gst_buffer_map (metadata_buffer, &size, NULL, GST_MAP_READWRITE);
+ data[0] &= (~0x80);
+ gst_buffer_unmap (metadata_buffer, data, size);
if (tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) {
GST_DEBUG_OBJECT (tag, "pushing metadata block buffer");
* block, and stop now if the user only wants to read tags
*/
if (tag->vorbiscomment != NULL) {
+ guint8 id_data[4];
/* We found some tags, try to parse them and notify the other elements
* that we encountered some tags
*/
GST_DEBUG_OBJECT (tag, "emitting vorbiscomment tags");
+ gst_buffer_extract (tag->vorbiscomment, 0, id_data, 4);
tag->tags = gst_tag_list_from_vorbiscomment_buffer (tag->vorbiscomment,
- GST_BUFFER_DATA (tag->vorbiscomment), 4, NULL);
+ id_data, 4, NULL);
if (tag->tags != NULL) {
gst_element_found_tags (GST_ELEMENT (tag),
gst_tag_list_copy (tag->tags));
*/
if (tag->state == GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT) {
GstBuffer *buffer;
- gint size;
const GstTagList *user_tags;
GstTagList *merged_tags;
*/
GST_WARNING_OBJECT (tag, "No tags found");
buffer = gst_buffer_new_and_alloc (12);
- if (buffer == NULL) {
- GST_ELEMENT_ERROR (tag, CORE, TOO_LAZY, (NULL),
- ("Error creating 12-byte buffer for padding block"));
- ret = GST_FLOW_ERROR;
- goto cleanup;
- }
- memset (GST_BUFFER_DATA (buffer), 0, GST_BUFFER_SIZE (buffer));
- GST_BUFFER_DATA (buffer)[0] = 0x81; /* 0x80 = Last metadata block,
- * 0x01 = padding block
- */
+ if (buffer == NULL)
+ goto no_buffer;
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_WRITE);
+ memset (data, 0, size);
+ data[0] = 0x81; /* 0x80 = Last metadata block,
+ * 0x01 = padding block */
+ gst_buffer_unmap (buffer, data, size);
} else {
guchar header[4];
+ guint8 fbit[1];
memset (header, 0, sizeof (header));
header[0] = 0x84; /* 0x80 = Last metadata block,
- * 0x04 = vorbiscomment block
- */
+ * 0x04 = vorbiscomment block */
buffer = gst_tag_list_to_vorbiscomment_buffer (merged_tags, header,
sizeof (header), NULL);
GST_DEBUG_OBJECT (tag, "Writing tags %" GST_PTR_FORMAT, merged_tags);
gst_tag_list_free (merged_tags);
- if (buffer == NULL) {
- GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
- ("Error converting tag list to vorbiscomment buffer"));
- ret = GST_FLOW_ERROR;
- goto cleanup;
- }
- size = GST_BUFFER_SIZE (buffer) - 4;
- if ((size > 0xFFFFFF) || (size < 0)) {
- /* FLAC vorbis comment blocks are limited to 2^24 bytes,
- * while the vorbis specs allow more than that. Shouldn't
- * be a real world problem though
- */
- GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
- ("Vorbis comment of size %d too long", size));
- ret = GST_FLOW_ERROR;
- goto cleanup;
- }
+ if (buffer == NULL)
+ goto no_comment;
+
+ size = gst_buffer_get_size (buffer);
+ if ((size < 4) || ((size - 4) > 0xFFFFFF))
+ goto comment_too_long;
+ fbit[0] = 1;
/* Get rid of the framing bit at the end of the vorbiscomment buffer
* if it exists since libFLAC seems to lose sync because of this
* bit in gstflacdec
*/
- if (GST_BUFFER_DATA (buffer)[GST_BUFFER_SIZE (buffer) - 1] == 1) {
- GstBuffer *sub;
-
- sub = gst_buffer_create_sub (buffer, 0, GST_BUFFER_SIZE (buffer) - 1);
- gst_buffer_unref (buffer);
- buffer = sub;
+ if (gst_buffer_memcmp (buffer, size - 1, fbit, 1) == 0) {
+ buffer = gst_buffer_make_writable (buffer);
+ gst_buffer_resize (buffer, 0, size - 1);
}
}
/* The 4 byte metadata block header isn't accounted for in the total
* size of the metadata block
*/
- size = GST_BUFFER_SIZE (buffer) - 4;
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_WRITE);
+ data[1] = (((size - 4) & 0xFF0000) >> 16);
+ data[2] = (((size - 4) & 0x00FF00) >> 8);
+ data[3] = ((size - 4) & 0x0000FF);
+ gst_buffer_unmap (buffer, data, size);
+
+ GST_DEBUG_OBJECT (tag, "pushing %" G_GSIZE_FORMAT " byte vorbiscomment buffer", size);
- GST_BUFFER_DATA (buffer)[1] = ((size & 0xFF0000) >> 16);
- GST_BUFFER_DATA (buffer)[2] = ((size & 0x00FF00) >> 8);
- GST_BUFFER_DATA (buffer)[3] = (size & 0x0000FF);
- GST_DEBUG_OBJECT (tag, "pushing %d byte vorbiscomment buffer",
- GST_BUFFER_SIZE (buffer));
gst_buffer_set_caps (buffer, GST_PAD_CAPS (tag->srcpad));
ret = gst_pad_push (tag->srcpad, buffer);
if (ret != GST_FLOW_OK) {
gst_object_unref (tag);
return ret;
-}
+ /* ERRORS */
+no_buffer:
+ {
+ GST_ELEMENT_ERROR (tag, CORE, TOO_LAZY, (NULL),
+ ("Error creating 12-byte buffer for padding block"));
+ ret = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+no_comment:
+ {
+ GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
+ ("Error converting tag list to vorbiscomment buffer"));
+ ret = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+comment_too_long:
+ {
+ /* FLAC vorbis comment blocks are limited to 2^24 bytes,
+ * while the vorbis specs allow more than that. Shouldn't
+ * be a real world problem though
+ */
+ GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
+ ("Vorbis comment of size %d too long", size));
+ ret = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+}
static GstStateChangeReturn
gst_flac_tag_change_state (GstElement * element, GstStateChange transition)
break;
}
- return parent_class->change_state (element, transition);
+ return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
}
+++ /dev/null
-plugin_LTLIBRARIES = libgstgconfelements.la
-
-libgstgconfelements_la_SOURCES = \
- gstgconfaudiosink.c \
- gstgconfaudiosrc.c \
- gstgconfelements.c \
- gstgconfvideosink.c \
- gstgconfvideosrc.c \
- gstswitchsink.c \
- gstswitchsrc.c \
- gstgconf.c
-
-DIR_CFLAGS = -DGST_GCONF_DIR=\"/system/gstreamer/@GST_MAJORMINOR@\"
-libgstgconfelements_la_CFLAGS = $(GST_CFLAGS) $(GCONF_CFLAGS) $(DIR_CFLAGS)
-libgstgconfelements_la_LIBADD = $(GST_LIBS) $(GCONF_LIBS)
-libgstgconfelements_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
-libgstgconfelements_la_LIBTOOLFLAGS = --tag=disable-static
-
-noinst_HEADERS = \
- gstgconfaudiosink.h \
- gstgconfaudiosrc.h \
- gstgconfelements.h \
- gstgconfvideosink.h \
- gstgconfvideosrc.h \
- gstswitchsink.h \
- gstswitchsrc.h \
- gstgconf.h
+++ /dev/null
-/* GStreamer
- * nf_get_default_audio_sink
- * Copyright (C) <2002> Thomas Vander Stichele <thomas@apestaart.org>
- * Copyright (C) <2006> Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-/*
- * this library handles interaction with GConf
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <gst/gst.h>
-
-#include "gstgconf.h"
-#include "gstgconfelements.h" /* for debug category */
-
-#ifndef GST_GCONF_DIR
-#error "GST_GCONF_DIR is not defined !"
-#endif
-
-static GConfClient *_gst_gconf_client = NULL; /* GConf connection */
-
-
-/* internal functions */
-
-static GConfClient *
-gst_gconf_get_client (void)
-{
- if (!_gst_gconf_client)
- _gst_gconf_client = gconf_client_get_default ();
-
- return _gst_gconf_client;
-}
-
-/* external functions */
-
-/**
- * gst_gconf_get_string:
- * @key: a #gchar corresponding to the key you want to get.
- *
- * Get GConf key @key's string value.
- *
- * Returns: a newly allocated #gchar string containing @key's value,
- * or NULL in the case of an error..
- */
-gchar *
-gst_gconf_get_string (const gchar * key)
-{
- GError *error = NULL;
- gchar *value = NULL;
- gchar *full_key;
-
- if (!g_str_has_prefix (key, GST_GCONF_DIR))
- full_key = g_strdup_printf ("%s/%s", GST_GCONF_DIR, key);
- else
- full_key = g_strdup (key);
-
- value = gconf_client_get_string (gst_gconf_get_client (), full_key, &error);
- g_free (full_key);
-
- if (error) {
- g_warning ("gst_gconf_get_string: error: %s\n", error->message);
- g_error_free (error);
- return NULL;
- }
-
- return value;
-}
-
-const gchar *
-gst_gconf_get_key_for_sink_profile (GstGConfProfile profile)
-{
- switch (profile) {
- case GCONF_PROFILE_SOUNDS:
- return GST_GCONF_DIR "/" GST_GCONF_AUDIOSINK_KEY;
- case GCONF_PROFILE_MUSIC:
- return GST_GCONF_DIR "/" GST_GCONF_MUSIC_AUDIOSINK_KEY;
- case GCONF_PROFILE_CHAT:
- return GST_GCONF_DIR "/" GST_GCONF_CHAT_AUDIOSINK_KEY;
- default:
- break;
- }
-
- g_return_val_if_reached (GST_GCONF_DIR "/" GST_GCONF_AUDIOSINK_KEY);
-}
-
-/**
- * gst_gconf_set_string:
- * @key: a #gchar corresponding to the key you want to set.
- * @value: a #gchar containing key value.
- *
- * Set GConf key @key to string value @value.
- */
-void
-gst_gconf_set_string (const gchar * key, const gchar * value)
-{
- GError *error = NULL;
- gchar *full_key;
-
- if (!g_str_has_prefix (key, GST_GCONF_DIR))
- full_key = g_strdup_printf ("%s/%s", GST_GCONF_DIR, key);
- else
- full_key = g_strdup (key);
-
- gconf_client_set_string (gst_gconf_get_client (), full_key, value, &error);
- if (error) {
- GST_ERROR ("gst_gconf_set_string: error: %s\n", error->message);
- g_error_free (error);
- }
- g_free (full_key);
-}
-
-/**
- * gst_gconf_render_bin_from_key:
- * @key: a #gchar string corresponding to a GConf key.
- *
- * Render bin from GConf key @key.
- *
- * Returns: a #GstElement containing the rendered bin.
- */
-GstElement *
-gst_gconf_render_bin_from_key (const gchar * key)
-{
- GstElement *bin = NULL;
- gchar *value;
-
- value = gst_gconf_get_string (key);
-
- GST_LOG ("%s = %s", GST_STR_NULL (key), GST_STR_NULL (value));
-
- if (value) {
- GError *err = NULL;
-
- bin = gst_parse_bin_from_description (value, TRUE, &err);
- if (err) {
- GST_ERROR ("gconf: error creating bin '%s': %s", value, err->message);
- g_error_free (err);
- }
-
- g_free (value);
- }
- return bin;
-}
-
-/**
- * gst_gconf_render_bin_with_default:
- * @bin: a #gchar string describing the pipeline to construct.
- * @default_sink: an element to use as default if the given pipeline fails to construct.
- *
- * Render bin from description @bin using @default_sink element as a fallback.
- *
- * Returns: a #GstElement containing the rendered bin.
- */
-GstElement *
-gst_gconf_render_bin_with_default (const gchar * bin,
- const gchar * default_sink)
-{
- GstElement *ret = NULL;
- GError *err = NULL;
-
- if (bin != NULL)
- ret = gst_parse_bin_from_description (bin, TRUE, &err);
-
- if (ret == NULL || err != NULL) {
- if (err) {
- GST_DEBUG ("Could not create audio sink from GConf settings: %s",
- err->message);
- g_error_free (err);
- } else {
- GST_DEBUG ("Could not create audio sink from GConf settings");
- }
-
- ret = gst_element_factory_make (default_sink, NULL);
-
- if (!ret)
- g_warning
- ("Could not build GConf audio sink and the replacement %s doesn't work",
- DEFAULT_AUDIOSINK);
- }
-
- return ret;
-}
-
-/**
- * gst_gconf_get_default_video_sink:
- *
- * Render video output bin from GStreamer GConf key : "default/videosink".
- * If key is invalid, the default video sink for the platform is used
- * (typically xvimagesink or ximagesink).
- *
- * Returns: a #GstElement containing the video output bin, or NULL if
- * everything failed.
- */
-GstElement *
-gst_gconf_get_default_video_sink (void)
-{
- GstElement *ret = gst_gconf_render_bin_from_key (GST_GCONF_VIDEOSINK_KEY);
-
- if (!ret) {
- ret = gst_element_factory_make (DEFAULT_VIDEOSINK, NULL);
-
- if (!ret)
- g_warning ("No GConf default video sink key and %s doesn't work",
- DEFAULT_VIDEOSINK);
- }
-
- return ret;
-}
-
-/**
- * gst_gconf_get_default_audio_src:
- *
- * Render audio acquisition bin from GStreamer GConf key : "default/audiosrc".
- * If key is invalid, the default audio source for the plaform is used.
- * (typically osssrc or sunaudiosrc).
- *
- * Returns: a #GstElement containing the audio source bin, or NULL if
- * everything failed.
- */
-GstElement *
-gst_gconf_get_default_audio_src (void)
-{
- GstElement *ret = gst_gconf_render_bin_from_key (GST_GCONF_AUDIOSRC_KEY);
-
- if (!ret) {
- ret = gst_element_factory_make (DEFAULT_AUDIOSRC, NULL);
-
- if (!ret)
- g_warning ("No GConf default audio src key and %s doesn't work",
- DEFAULT_AUDIOSRC);
- }
-
- return ret;
-}
-
-/**
- * gst_gconf_get_default_video_src:
- *
- * Render video acquisition bin from GStreamer GConf key :
- * "default/videosrc". If key is invalid, the default video source
- * for the platform is used (typically videotestsrc).
- *
- * Returns: a #GstElement containing the video source bin, or NULL if
- * everything failed.
- */
-GstElement *
-gst_gconf_get_default_video_src (void)
-{
- GstElement *ret = gst_gconf_render_bin_from_key (GST_GCONF_VIDEOSRC_KEY);
-
- if (!ret) {
- ret = gst_element_factory_make (DEFAULT_VIDEOSRC, NULL);
-
- if (!ret)
- g_warning ("No GConf default video src key and %s doesn't work",
- DEFAULT_VIDEOSRC);
- }
-
- return ret;
-}
-
-/**
- * gst_gconf_get_default_visualization_element:
- *
- * Render visualization bin from GStreamer GConf key : "default/visualization".
- * If key is invalid, the default visualization element is used.
- *
- * Returns: a #GstElement containing the visualization bin, or NULL if
- * everything failed.
- */
-GstElement *
-gst_gconf_get_default_visualization_element (void)
-{
- GstElement *ret = gst_gconf_render_bin_from_key ("default/visualization");
-
- if (!ret) {
- ret = gst_element_factory_make (DEFAULT_VISUALIZER, NULL);
-
- if (!ret)
- g_warning
- ("No GConf default visualization plugin key and %s doesn't work",
- DEFAULT_VISUALIZER);
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2002> Thomas Vander Stichele <thomas@apestaart.org>
- * Copyright (C) <2006> Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef GST_GCONF_H
-#define GST_GCONF_H
-
-/*
- * this library handles interaction with GConf
- */
-
-#include <gst/gst.h>
-#include <gconf/gconf-client.h>
-
-G_BEGIN_DECLS
-
-#define GST_GCONF_AUDIOSRC_KEY "default/audiosrc"
-#define GST_GCONF_AUDIOSINK_KEY "default/audiosink"
-#define GST_GCONF_MUSIC_AUDIOSINK_KEY "default/musicaudiosink"
-#define GST_GCONF_CHAT_AUDIOSINK_KEY "default/chataudiosink"
-#define GST_GCONF_VIDEOSRC_KEY "default/videosrc"
-#define GST_GCONF_VIDEOSINK_KEY "default/videosink"
-
-typedef enum
-{
- GCONF_PROFILE_SOUNDS,
- GCONF_PROFILE_MUSIC,
- GCONF_PROFILE_CHAT,
- GCONF_PROFILE_NONE /* Internal value only */
-} GstGConfProfile;
-
-gchar * gst_gconf_get_string (const gchar *key);
-void gst_gconf_set_string (const gchar *key,
- const gchar *value);
-
-const gchar * gst_gconf_get_key_for_sink_profile (GstGConfProfile profile);
-
-GstElement * gst_gconf_render_bin_from_key (const gchar *key);
-GstElement * gst_gconf_render_bin_with_default (const gchar *bin,
- const gchar *default_sink);
-
-GstElement * gst_gconf_get_default_video_sink (void);
-GstElement * gst_gconf_get_default_audio_sink (int profile);
-GstElement * gst_gconf_get_default_video_src (void);
-GstElement * gst_gconf_get_default_audio_src (void);
-GstElement * gst_gconf_get_default_visualization_element (void);
-
-G_END_DECLS
-
-#endif /* GST_GCONF_H */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2006 Jürg Billeter <j@bitron.ch>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/**
- * SECTION:element-gconfaudiosink
- *
- * This element outputs sound to the audiosink that has been configured in
- * GConf by the user.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch filesrc location=foo.ogg ! decodebin ! audioconvert ! audioresample ! gconfaudiosink
- * ]| Play on configured audiosink
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstgconfelements.h"
-#include "gstgconfaudiosink.h"
-
-static void gst_gconf_audio_sink_dispose (GObject * object);
-static void gst_gconf_audio_sink_finalize (GstGConfAudioSink * sink);
-static void cb_change_child (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data);
-static GstStateChangeReturn
-gst_gconf_audio_sink_change_state (GstElement * element,
- GstStateChange transition);
-static void gst_gconf_switch_profile (GstGConfAudioSink * sink,
- GstGConfProfile profile);
-
-enum
-{
- PROP_0,
- PROP_PROFILE
-};
-
-GST_BOILERPLATE (GstGConfAudioSink, gst_gconf_audio_sink, GstSwitchSink,
- GST_TYPE_SWITCH_SINK);
-
-static void gst_gconf_audio_sink_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_gconf_audio_sink_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-static void
-gst_gconf_audio_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (eklass, "GConf audio sink",
- "Sink/Audio",
- "Audio sink embedding the GConf-settings for audio output",
- "Jan Schmidt <thaytan@mad.scientist.com>");
-}
-
-#define GST_TYPE_GCONF_PROFILE (gst_gconf_profile_get_type())
-static GType
-gst_gconf_profile_get_type (void)
-{
- static GType gconf_profile_type = 0;
- static const GEnumValue gconf_profiles[] = {
- {GCONF_PROFILE_SOUNDS, "Sound Events", "sounds"},
- {GCONF_PROFILE_MUSIC, "Music and Movies", "music"},
- {GCONF_PROFILE_CHAT, "Audio/Video Conferencing", "chat"},
- {0, NULL, NULL}
- };
-
- if (!gconf_profile_type) {
- gconf_profile_type =
- g_enum_register_static ("GstGConfProfile", gconf_profiles);
- }
- return gconf_profile_type;
-}
-
-static void
-gst_gconf_audio_sink_class_init (GstGConfAudioSinkClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- oklass->set_property = gst_gconf_audio_sink_set_property;
- oklass->get_property = gst_gconf_audio_sink_get_property;
- oklass->dispose = gst_gconf_audio_sink_dispose;
- oklass->finalize = (GObjectFinalizeFunc) gst_gconf_audio_sink_finalize;
- eklass->change_state = gst_gconf_audio_sink_change_state;
-
- g_object_class_install_property (oklass, PROP_PROFILE,
- g_param_spec_enum ("profile", "Profile", "Profile",
- GST_TYPE_GCONF_PROFILE, GCONF_PROFILE_SOUNDS,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-}
-
-static void
-gst_gconf_audio_sink_reset (GstGConfAudioSink * sink)
-{
- gst_switch_sink_set_child (GST_SWITCH_SINK (sink), NULL);
-
- g_free (sink->gconf_str);
- sink->gconf_str = NULL;
-}
-
-static void
-gst_gconf_audio_sink_init (GstGConfAudioSink * sink,
- GstGConfAudioSinkClass * g_class)
-{
- gst_gconf_audio_sink_reset (sink);
-
- sink->client = gconf_client_get_default ();
- gconf_client_add_dir (sink->client, GST_GCONF_DIR "/default",
- GCONF_CLIENT_PRELOAD_RECURSIVE, NULL);
-
- gst_gconf_switch_profile (sink, GCONF_PROFILE_SOUNDS);
-}
-
-static void
-gst_gconf_audio_sink_dispose (GObject * object)
-{
- GstGConfAudioSink *sink = GST_GCONF_AUDIO_SINK (object);
-
- if (sink->client) {
- gst_gconf_switch_profile (sink, GCONF_PROFILE_NONE);
- g_object_unref (G_OBJECT (sink->client));
- sink->client = NULL;
- }
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static void
-gst_gconf_audio_sink_finalize (GstGConfAudioSink * sink)
-{
- g_free (sink->gconf_str);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, finalize, ((GObject *) (sink)));
-}
-
-static gboolean
-do_change_child (GstGConfAudioSink * sink)
-{
- const gchar *key;
- gchar *new_gconf_str;
- GstElement *new_kid;
-
- if (sink->profile == GCONF_PROFILE_NONE)
- return FALSE; /* Can't switch to a 'NONE' sink */
-
- key = gst_gconf_get_key_for_sink_profile (sink->profile);
- new_gconf_str = gst_gconf_get_string (key);
-
- GST_LOG_OBJECT (sink, "old gconf string: %s", GST_STR_NULL (sink->gconf_str));
- GST_LOG_OBJECT (sink, "new gconf string: %s", GST_STR_NULL (new_gconf_str));
-
- if (new_gconf_str != NULL && sink->gconf_str != NULL &&
- (strlen (new_gconf_str) == 0 ||
- strcmp (sink->gconf_str, new_gconf_str) == 0)) {
- g_free (new_gconf_str);
- GST_DEBUG_OBJECT (sink,
- "GConf key was updated, but it didn't change. Ignoring");
- return TRUE;
- }
-
- GST_DEBUG_OBJECT (sink, "GConf key changed: '%s' to '%s'",
- GST_STR_NULL (sink->gconf_str), GST_STR_NULL (new_gconf_str));
-
- GST_DEBUG_OBJECT (sink, "Creating new child for profile %d", sink->profile);
- new_kid =
- gst_gconf_render_bin_with_default (new_gconf_str, DEFAULT_AUDIOSINK);
-
- if (new_kid == NULL) {
- GST_ELEMENT_ERROR (sink, LIBRARY, SETTINGS, (NULL),
- ("Failed to render audio sink from GConf"));
- goto fail;
- }
-
- if (!gst_switch_sink_set_child (GST_SWITCH_SINK (sink), new_kid)) {
- GST_WARNING_OBJECT (sink, "Failed to update child element");
- goto fail;
- }
-
- g_free (sink->gconf_str);
- sink->gconf_str = new_gconf_str;
-
- GST_DEBUG_OBJECT (sink, "done changing gconf audio sink");
-
- return TRUE;
-
-fail:
- g_free (new_gconf_str);
- return FALSE;
-}
-
-static void
-gst_gconf_switch_profile (GstGConfAudioSink * sink, GstGConfProfile profile)
-{
- if (sink->client == NULL)
- return;
-
- if (sink->notify_id) {
- GST_DEBUG_OBJECT (sink, "Unsubscribing old key %s for profile %d",
- gst_gconf_get_key_for_sink_profile (sink->profile), sink->profile);
- gconf_client_notify_remove (sink->client, sink->notify_id);
- sink->notify_id = 0;
- }
-
- sink->profile = profile;
- if (profile != GCONF_PROFILE_NONE) {
- const gchar *key = gst_gconf_get_key_for_sink_profile (sink->profile);
-
- GST_DEBUG_OBJECT (sink, "Subscribing to key %s for profile %d",
- key, profile);
- sink->notify_id = gconf_client_notify_add (sink->client, key,
- cb_change_child, sink, NULL, NULL);
- }
-}
-
-static void
-gst_gconf_audio_sink_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstGConfAudioSink *sink;
-
- sink = GST_GCONF_AUDIO_SINK (object);
-
- switch (prop_id) {
- case PROP_PROFILE:
- gst_gconf_switch_profile (sink, g_value_get_enum (value));
- break;
- default:
- break;
- }
-}
-
-static void
-gst_gconf_audio_sink_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec)
-{
- GstGConfAudioSink *sink;
-
- sink = GST_GCONF_AUDIO_SINK (object);
-
- switch (prop_id) {
- case PROP_PROFILE:
- g_value_set_enum (value, sink->profile);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static void
-cb_change_child (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data)
-{
- do_change_child (GST_GCONF_AUDIO_SINK (data));
-}
-
-static GstStateChangeReturn
-gst_gconf_audio_sink_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstGConfAudioSink *sink = GST_GCONF_AUDIO_SINK (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!do_change_child (sink)) {
- gst_gconf_audio_sink_reset (sink);
- return GST_STATE_CHANGE_FAILURE;
- }
- break;
- default:
- break;
- }
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- gst_gconf_audio_sink_reset (sink);
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_GCONF_AUDIO_SINK_H__
-#define __GST_GCONF_AUDIO_SINK_H__
-
-#include <gst/gst.h>
-#include <gconf/gconf-client.h>
-#include "gstswitchsink.h"
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_GCONF_AUDIO_SINK \
- (gst_gconf_audio_sink_get_type ())
-#define GST_GCONF_AUDIO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_GCONF_AUDIO_SINK, \
- GstGConfAudioSink))
-#define GST_GCONF_AUDIO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_GCONF_AUDIO_SINK, \
- GstGConfAudioSinkClass))
-#define GST_IS_GCONF_AUDIO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_GCONF_AUDIO_SINK))
-#define GST_IS_GCONF_AUDIO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_GCONF_AUDIO_SINK))
-
-typedef struct _GstGConfAudioSink {
- GstSwitchSink parent;
-
- /* explicit pointers to stuff used */
- GConfClient *client;
- GstGConfProfile profile;
- guint notify_id;
-
- /* Current gconf string */
- gchar *gconf_str;
-} GstGConfAudioSink;
-
-typedef struct _GstGConfAudioSinkClass {
- GstSwitchSinkClass parent_class;
-} GstGConfAudioSinkClass;
-
-GType gst_gconf_audio_sink_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_GCONF_AUDIO_SINK_H__ */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2005 Tim-Philipp Müller <tim centricular net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/**
- * SECTION:element-gconfaudiosrc
- * @see_also: #GstAlsaSrc, #GstAutoAudioSrc
- *
- * This element records sound from the audiosink that has been configured in
- * GConf by the user.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch gconfaudiosrc ! audioconvert ! wavenc ! filesink location=record.wav
- * ]| Record from configured audioinput
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstgconfelements.h"
-#include "gstgconfaudiosrc.h"
-
-static void gst_gconf_audio_src_dispose (GObject * object);
-static void gst_gconf_audio_src_finalize (GstGConfAudioSrc * src);
-static void cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data);
-static GstStateChangeReturn
-gst_gconf_audio_src_change_state (GstElement * element,
- GstStateChange transition);
-
-GST_BOILERPLATE (GstGConfAudioSrc, gst_gconf_audio_src, GstSwitchSrc,
- GST_TYPE_SWITCH_SRC);
-
-static void
-gst_gconf_audio_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (eklass, "GConf audio source",
- "Source/Audio",
- "Audio source embedding the GConf-settings for audio input",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-}
-
-static void
-gst_gconf_audio_src_class_init (GstGConfAudioSrcClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- oklass->dispose = gst_gconf_audio_src_dispose;
- oklass->finalize = (GObjectFinalizeFunc) gst_gconf_audio_src_finalize;
- eklass->change_state = gst_gconf_audio_src_change_state;
-}
-
-/*
- * Hack to make negotiation work.
- */
-
-static gboolean
-gst_gconf_audio_src_reset (GstGConfAudioSrc * src)
-{
- gst_switch_src_set_child (GST_SWITCH_SRC (src), NULL);
-
- g_free (src->gconf_str);
- src->gconf_str = NULL;
- return TRUE;
-}
-
-static void
-gst_gconf_audio_src_init (GstGConfAudioSrc * src,
- GstGConfAudioSrcClass * g_class)
-{
- gst_gconf_audio_src_reset (src);
-
- src->client = gconf_client_get_default ();
- gconf_client_add_dir (src->client, GST_GCONF_DIR,
- GCONF_CLIENT_PRELOAD_RECURSIVE, NULL);
- src->gconf_notify_id = gconf_client_notify_add (src->client,
- GST_GCONF_DIR "/" GST_GCONF_AUDIOSRC_KEY,
- cb_toggle_element, src, NULL, NULL);
-}
-
-static void
-gst_gconf_audio_src_dispose (GObject * object)
-{
- GstGConfAudioSrc *src = GST_GCONF_AUDIO_SRC (object);
-
- if (src->client) {
- if (src->gconf_notify_id) {
- gconf_client_notify_remove (src->client, src->gconf_notify_id);
- src->gconf_notify_id = 0;
- }
-
- g_object_unref (G_OBJECT (src->client));
- src->client = NULL;
- }
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static void
-gst_gconf_audio_src_finalize (GstGConfAudioSrc * src)
-{
- g_free (src->gconf_str);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, finalize, ((GObject *) (src)));
-}
-
-static gboolean
-do_toggle_element (GstGConfAudioSrc * src)
-{
- GstElement *new_kid;
- gchar *new_gconf_str;
-
- new_gconf_str = gst_gconf_get_string (GST_GCONF_AUDIOSRC_KEY);
- if (new_gconf_str != NULL && src->gconf_str != NULL &&
- (strlen (new_gconf_str) == 0 ||
- strcmp (src->gconf_str, new_gconf_str) == 0)) {
- g_free (new_gconf_str);
- GST_DEBUG_OBJECT (src, "GConf key was updated, but it didn't change");
- return TRUE;
- }
-
- GST_DEBUG_OBJECT (src, "GConf key changed: '%s' to '%s'",
- GST_STR_NULL (src->gconf_str), GST_STR_NULL (new_gconf_str));
-
- GST_DEBUG_OBJECT (src, "Creating new kid");
- if (!(new_kid = gst_gconf_get_default_audio_src ())) {
- GST_ELEMENT_ERROR (src, LIBRARY, SETTINGS, (NULL),
- ("Failed to render audio src from GConf"));
- return FALSE;
- }
-
- if (!gst_switch_src_set_child (GST_SWITCH_SRC (src), new_kid)) {
- GST_WARNING_OBJECT (src, "Failed to update child element");
- goto fail;
- }
-
- g_free (src->gconf_str);
- src->gconf_str = new_gconf_str;
-
- GST_DEBUG_OBJECT (src, "done changing gconf audio src");
-
- return TRUE;
-fail:
- g_free (new_gconf_str);
- return FALSE;
-}
-
-static void
-cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data)
-{
- do_toggle_element (GST_GCONF_AUDIO_SRC (data));
-}
-
-static GstStateChangeReturn
-gst_gconf_audio_src_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstGConfAudioSrc *src = GST_GCONF_AUDIO_SRC (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!do_toggle_element (src)) {
- gst_gconf_audio_src_reset (src);
- return GST_STATE_CHANGE_FAILURE;
- }
- break;
- default:
- break;
- }
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- if (!gst_gconf_audio_src_reset (src))
- ret = GST_STATE_CHANGE_FAILURE;
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2005 Tim-Philipp Müller <tim centricular net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_GCONF_AUDIO_SRC_H__
-#define __GST_GCONF_AUDIO_SRC_H__
-
-#include <gst/gst.h>
-#include <gconf/gconf-client.h>
-
-#include "gstswitchsrc.h"
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_GCONF_AUDIO_SRC (gst_gconf_audio_src_get_type ())
-#define GST_GCONF_AUDIO_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_GCONF_AUDIO_SRC, GstGConfAudioSrc))
-#define GST_GCONF_AUDIO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_GCONF_AUDIO_SRC, GstGConfAudioSrcClass))
-#define GST_IS_GCONF_AUDIO_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_GCONF_AUDIO_SRC))
-#define GST_IS_GCONF_AUDIO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_GCONF_AUDIO_SRC))
-
-typedef struct _GstGConfAudioSrc {
- GstSwitchSrc parent;
-
- /* explicit pointers to stuff used */
- GConfClient *client;
-
- guint gconf_notify_id;
-
- /* Current gconf string */
- gchar *gconf_str;
-} GstGConfAudioSrc;
-
-typedef struct _GstGConfAudioSrcClass {
- GstSwitchSrcClass parent_class;
-} GstGConfAudioSrcClass;
-
-GType gst_gconf_audio_src_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_GCONF_AUDIO_SRC_H__ */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <gst/gst.h>
-
-#include "gstgconfelements.h"
-
-#include "gstgconfaudiosink.h"
-#include "gstgconfaudiosrc.h"
-#include "gstgconfvideosink.h"
-#include "gstgconfvideosrc.h"
-
-GST_DEBUG_CATEGORY (gconf_debug);
-
-static gboolean
-plugin_init (GstPlugin * plugin)
-{
- GST_DEBUG_CATEGORY_INIT (gconf_debug, "gconf", 0,
- "GConf/GStreamer audio/video output wrapper elements");
-
- if (!gst_element_register (plugin, "gconfvideosink",
- GST_RANK_NONE, GST_TYPE_GCONF_VIDEO_SINK) ||
- !gst_element_register (plugin, "gconfvideosrc",
- GST_RANK_NONE, GST_TYPE_GCONF_VIDEO_SRC) ||
- !gst_element_register (plugin, "gconfaudiosink",
- GST_RANK_NONE, GST_TYPE_GCONF_AUDIO_SINK) ||
- !gst_element_register (plugin, "gconfaudiosrc",
- GST_RANK_NONE, GST_TYPE_GCONF_AUDIO_SRC)) {
- return FALSE;
- }
-
- return TRUE;
-}
-
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
- GST_VERSION_MINOR,
- "gconfelements",
- "elements wrapping the GStreamer/GConf audio/video output settings",
- plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_GCONF_ELEMENTS_H__
-#define __GST_GCONF_ELEMENTS_H__
-
-#include "gstgconf.h"
-
-GST_DEBUG_CATEGORY_EXTERN (gconf_debug);
-#define GST_CAT_DEFAULT gconf_debug
-
-#endif /* __GST_GCONF_ELEMENTS_H__ */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/**
- * SECTION:element-gconfvideosink
- *
- * This element outputs video to the videosink that has been configured in
- * GConf by the user.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch filesrc location=foo.ogg ! decodebin ! ffmpegcolorspace ! gconfvideosink
- * ]| Play on configured videosink
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstgconfelements.h"
-#include "gstgconfvideosink.h"
-
-static void gst_gconf_video_sink_dispose (GObject * object);
-static void gst_gconf_video_sink_finalize (GstGConfVideoSink * sink);
-static void cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data);
-static GstStateChangeReturn
-gst_gconf_video_sink_change_state (GstElement * element,
- GstStateChange transition);
-
-GST_BOILERPLATE (GstGConfVideoSink, gst_gconf_video_sink, GstSwitchSink,
- GST_TYPE_SWITCH_SINK);
-
-static void
-gst_gconf_video_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (eklass, "GConf video sink",
- "Sink/Video",
- "Video sink embedding the GConf-settings for video output",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-}
-
-static void
-gst_gconf_video_sink_class_init (GstGConfVideoSinkClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- oklass->dispose = gst_gconf_video_sink_dispose;
- oklass->finalize = (GObjectFinalizeFunc) gst_gconf_video_sink_finalize;
- eklass->change_state = gst_gconf_video_sink_change_state;
-}
-
-/*
- * Hack to make negotiation work.
- */
-
-static void
-gst_gconf_video_sink_reset (GstGConfVideoSink * sink)
-{
- gst_switch_sink_set_child (GST_SWITCH_SINK (sink), NULL);
-
- g_free (sink->gconf_str);
- sink->gconf_str = NULL;
-}
-
-static void
-gst_gconf_video_sink_init (GstGConfVideoSink * sink,
- GstGConfVideoSinkClass * g_class)
-{
- gst_gconf_video_sink_reset (sink);
-
- sink->client = gconf_client_get_default ();
- gconf_client_add_dir (sink->client, GST_GCONF_DIR,
- GCONF_CLIENT_PRELOAD_RECURSIVE, NULL);
- sink->notify_id = gconf_client_notify_add (sink->client,
- GST_GCONF_DIR "/" GST_GCONF_VIDEOSINK_KEY,
- cb_toggle_element, sink, NULL, NULL);
-}
-
-static void
-gst_gconf_video_sink_dispose (GObject * object)
-{
- GstGConfVideoSink *sink = GST_GCONF_VIDEO_SINK (object);
-
- if (sink->client) {
- if (sink->notify_id != 0)
- gconf_client_notify_remove (sink->client, sink->notify_id);
-
- g_object_unref (G_OBJECT (sink->client));
- sink->client = NULL;
- }
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static void
-gst_gconf_video_sink_finalize (GstGConfVideoSink * sink)
-{
- g_free (sink->gconf_str);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, finalize, ((GObject *) (sink)));
-}
-
-static gboolean
-do_change_child (GstGConfVideoSink * sink)
-{
- gchar *new_gconf_str;
- GstElement *new_kid;
-
- new_gconf_str = gst_gconf_get_string (GST_GCONF_VIDEOSINK_KEY);
-
- GST_LOG_OBJECT (sink, "old gconf string: %s", GST_STR_NULL (sink->gconf_str));
- GST_LOG_OBJECT (sink, "new gconf string: %s", GST_STR_NULL (new_gconf_str));
-
- if (new_gconf_str != NULL && sink->gconf_str != NULL &&
- (strlen (new_gconf_str) == 0 ||
- strcmp (sink->gconf_str, new_gconf_str) == 0)) {
- g_free (new_gconf_str);
- GST_DEBUG_OBJECT (sink,
- "GConf key was updated, but it didn't change. Ignoring");
- return TRUE;
- }
-
- GST_DEBUG_OBJECT (sink, "GConf key changed: '%s' to '%s'",
- GST_STR_NULL (sink->gconf_str), GST_STR_NULL (new_gconf_str));
-
- GST_DEBUG_OBJECT (sink, "Creating new kid");
- if (!(new_kid = gst_gconf_get_default_video_sink ())) {
- GST_ELEMENT_ERROR (sink, LIBRARY, SETTINGS, (NULL),
- ("Failed to render video sink from GConf"));
- return FALSE;
- }
-
- if (!gst_switch_sink_set_child (GST_SWITCH_SINK (sink), new_kid)) {
- GST_WARNING_OBJECT (sink, "Failed to update child element");
- goto fail;
- }
-
- g_free (sink->gconf_str);
- sink->gconf_str = new_gconf_str;
-
- GST_DEBUG_OBJECT (sink, "done changing gconf video sink");
-
- return TRUE;
-
-fail:
- g_free (new_gconf_str);
- return FALSE;
-}
-
-static void
-cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data)
-{
- do_change_child (GST_GCONF_VIDEO_SINK (data));
-}
-
-static GstStateChangeReturn
-gst_gconf_video_sink_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstGConfVideoSink *sink = GST_GCONF_VIDEO_SINK (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!do_change_child (sink)) {
- gst_gconf_video_sink_reset (sink);
- return GST_STATE_CHANGE_FAILURE;
- }
- break;
- default:
- break;
- }
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- gst_gconf_video_sink_reset (sink);
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_GCONF_VIDEO_SINK_H__
-#define __GST_GCONF_VIDEO_SINK_H__
-
-#include <gst/gst.h>
-#include <gconf/gconf-client.h>
-
-#include "gstswitchsink.h"
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_GCONF_VIDEO_SINK \
- (gst_gconf_video_sink_get_type ())
-#define GST_GCONF_VIDEO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_GCONF_VIDEO_SINK, \
- GstGConfVideoSink))
-#define GST_GCONF_VIDEO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_GCONF_VIDEO_SINK, \
- GstGConfVideoSinkClass))
-#define GST_IS_GCONF_VIDEO_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_GCONF_VIDEO_SINK))
-#define GST_IS_GCONF_VIDEO_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_GCONF_VIDEO_SINK))
-
-typedef struct _GstGConfVideoSink {
- GstSwitchSink parent;
-
- /* explicit pointers to stuff used */
- GConfClient *client;
-
- /* gconf notify id */
- guint notify_id;
-
- /* Current gconf string */
- gchar *gconf_str;
-} GstGConfVideoSink;
-
-typedef struct _GstGConfVideoSinkClass {
- GstSwitchSinkClass parent_class;
-} GstGConfVideoSinkClass;
-
-GType gst_gconf_video_sink_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_GCONF_VIDEO_SINK_H__ */
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2005 Tim-Philipp Müller <tim centricular net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/**
- * SECTION:element-gconfvideosrc
- * @see_also: #GstAlsaSrc, #GstAutoVideoSrc
- *
- * This element records video from the videosink that has been configured in
- * GConf by the user.
- *
- * <refsect2>
- * <title>Example launch line</title>
- * |[
- * gst-launch gconfvideosrc ! theoraenc ! oggmux ! filesink location=record.ogg
- * ]| Record from configured videoinput
- * </refsect2>
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstgconfelements.h"
-#include "gstgconfvideosrc.h"
-
-static void gst_gconf_video_src_dispose (GObject * object);
-static void gst_gconf_video_src_finalize (GstGConfVideoSrc * src);
-static void cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data);
-static GstStateChangeReturn
-gst_gconf_video_src_change_state (GstElement * element,
- GstStateChange transition);
-
-GST_BOILERPLATE (GstGConfVideoSrc, gst_gconf_video_src, GstSwitchSrc,
- GST_TYPE_SWITCH_SRC);
-
-static void
-gst_gconf_video_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (eklass, "GConf video source",
- "Source/Video",
- "Video source embedding the GConf-settings for video input",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-}
-
-static void
-gst_gconf_video_src_class_init (GstGConfVideoSrcClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- oklass->dispose = gst_gconf_video_src_dispose;
- oklass->finalize = (GObjectFinalizeFunc) gst_gconf_video_src_finalize;
- eklass->change_state = gst_gconf_video_src_change_state;
-}
-
-/*
- * Hack to make negotiation work.
- */
-
-static gboolean
-gst_gconf_video_src_reset (GstGConfVideoSrc * src)
-{
- gst_switch_src_set_child (GST_SWITCH_SRC (src), NULL);
-
- g_free (src->gconf_str);
- src->gconf_str = NULL;
-
- return TRUE;
-}
-
-static void
-gst_gconf_video_src_init (GstGConfVideoSrc * src,
- GstGConfVideoSrcClass * g_class)
-{
- gst_gconf_video_src_reset (src);
-
- src->client = gconf_client_get_default ();
- gconf_client_add_dir (src->client, GST_GCONF_DIR,
- GCONF_CLIENT_PRELOAD_RECURSIVE, NULL);
- src->notify_id = gconf_client_notify_add (src->client,
- GST_GCONF_DIR "/" GST_GCONF_VIDEOSRC_KEY,
- cb_toggle_element, src, NULL, NULL);
-}
-
-static void
-gst_gconf_video_src_dispose (GObject * object)
-{
- GstGConfVideoSrc *src = GST_GCONF_VIDEO_SRC (object);
-
- if (src->client) {
- if (src->notify_id != 0)
- gconf_client_notify_remove (src->client, src->notify_id);
-
- g_object_unref (G_OBJECT (src->client));
- src->client = NULL;
- }
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static void
-gst_gconf_video_src_finalize (GstGConfVideoSrc * src)
-{
- g_free (src->gconf_str);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, finalize, ((GObject *) (src)));
-}
-
-static gboolean
-do_toggle_element (GstGConfVideoSrc * src)
-{
- GstElement *new_kid;
- gchar *new_gconf_str;
-
- new_gconf_str = gst_gconf_get_string (GST_GCONF_VIDEOSRC_KEY);
- if (new_gconf_str != NULL && src->gconf_str != NULL &&
- (strlen (new_gconf_str) == 0 ||
- strcmp (src->gconf_str, new_gconf_str) == 0)) {
- g_free (new_gconf_str);
- GST_DEBUG_OBJECT (src, "GConf key was updated, but it didn't change");
- return TRUE;
- }
-
- GST_DEBUG_OBJECT (src, "GConf key changed: '%s' to '%s'",
- GST_STR_NULL (src->gconf_str), GST_STR_NULL (new_gconf_str));
-
- GST_DEBUG_OBJECT (src, "Creating new kid");
- if (!(new_kid = gst_gconf_get_default_video_src ())) {
- GST_ELEMENT_ERROR (src, LIBRARY, SETTINGS, (NULL),
- ("Failed to render video src from GConf"));
- return FALSE;
- }
-
- if (!gst_switch_src_set_child (GST_SWITCH_SRC (src), new_kid)) {
- GST_WARNING_OBJECT (src, "Failed to update child element");
- goto fail;
- }
-
- g_free (src->gconf_str);
- src->gconf_str = new_gconf_str;
-
- GST_DEBUG_OBJECT (src, "done changing gconf video src");
-
- return TRUE;
-fail:
- g_free (new_gconf_str);
- return FALSE;
-}
-
-static void
-cb_toggle_element (GConfClient * client,
- guint connection_id, GConfEntry * entry, gpointer data)
-{
- do_toggle_element (GST_GCONF_VIDEO_SRC (data));
-}
-
-static GstStateChangeReturn
-gst_gconf_video_src_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstGConfVideoSrc *src = GST_GCONF_VIDEO_SRC (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!do_toggle_element (src)) {
- gst_gconf_video_src_reset (src);
- return GST_STATE_CHANGE_FAILURE;
- }
- break;
- default:
- break;
- }
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- if (!gst_gconf_video_src_reset (src))
- ret = GST_STATE_CHANGE_FAILURE;
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * (c) 2005 Tim-Philipp Müller <tim centricular net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_GCONF_VIDEO_SRC_H__
-#define __GST_GCONF_VIDEO_SRC_H__
-
-#include <gst/gst.h>
-#include <gconf/gconf-client.h>
-
-#include "gstswitchsrc.h"
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_GCONF_VIDEO_SRC (gst_gconf_video_src_get_type ())
-#define GST_GCONF_VIDEO_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_GCONF_VIDEO_SRC, GstGConfVideoSrc))
-#define GST_GCONF_VIDEO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_GCONF_VIDEO_SRC, GstGConfVideoSrcClass))
-#define GST_IS_GCONF_VIDEO_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_GCONF_VIDEO_SRC))
-#define GST_IS_GCONF_VIDEO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_GCONF_VIDEO_SRC))
-
-typedef struct _GstGConfVideoSrc {
- GstSwitchSrc parent;
-
- /* explicit pointers to stuff used */
- GConfClient *client;
-
- /* gconf key notification id */
- guint notify_id;
-
- /* Current gconf string */
- gchar *gconf_str;
-} GstGConfVideoSrc;
-
-typedef struct _GstGConfVideoSrcClass {
- GstSwitchSrcClass parent_class;
-} GstGConfVideoSrcClass;
-
-GType gst_gconf_video_src_get_type (void);
-
-G_END_DECLS
-
-#endif /* __GST_GCONF_VIDEO_SRC_H__ */
+++ /dev/null
-/* GStreamer
- * Copyright (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * Copyright (c) 2006 Jürg Billeter <j@bitron.ch>
- * Copyright (c) 2007 Jan Schmidt <thaytan@noraisin.net>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstswitchsink.h"
-
-GST_DEBUG_CATEGORY_STATIC (switch_debug);
-#define GST_CAT_DEFAULT switch_debug
-
-static void gst_switch_sink_dispose (GObject * object);
-static GstStateChangeReturn
-gst_switch_sink_change_state (GstElement * element, GstStateChange transition);
-
-enum
-{
- PROP_0
-};
-
-GST_BOILERPLATE (GstSwitchSink, gst_switch_sink, GstBin, GST_TYPE_BIN);
-
-static void
-gst_switch_sink_base_init (gpointer klass)
-{
- GST_DEBUG_CATEGORY_INIT (switch_debug, "switchsink", 0, "switchsink element");
-}
-
-static void
-gst_switch_sink_class_init (GstSwitchSinkClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
- static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS_ANY);
- GstPadTemplate *child_pad_templ;
-
- oklass->dispose = gst_switch_sink_dispose;
- eklass->change_state = gst_switch_sink_change_state;
-
- /* Provide a default pad template if the child didn't */
- child_pad_templ = gst_element_class_get_pad_template (eklass, "sink");
- if (child_pad_templ == NULL) {
- gst_element_class_add_pad_template (eklass,
- gst_static_pad_template_get (&sink_template));
- }
-}
-
-static gboolean
-gst_switch_sink_reset (GstSwitchSink * sink)
-{
- /* this will install fakesink if no other child has been set,
- * otherwise we rely on the subclass to know when to unset its
- * custom kid */
- if (sink->kid == NULL) {
- return gst_switch_sink_set_child (sink, NULL);
- }
-
- return TRUE;
-}
-
-static void
-gst_switch_sink_init (GstSwitchSink * sink, GstSwitchSinkClass * g_class)
-{
- GstElementClass *eklass = GST_ELEMENT_GET_CLASS (sink);
- GstPadTemplate *templ;
-
- templ = gst_element_class_get_pad_template (eklass, "sink");
- sink->pad = gst_ghost_pad_new_no_target_from_template ("sink", templ);
- gst_element_add_pad (GST_ELEMENT (sink), sink->pad);
-
- gst_switch_sink_reset (sink);
-
- GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_IS_SINK);
-}
-
-static void
-gst_switch_sink_dispose (GObject * object)
-{
- GstSwitchSink *sink = GST_SWITCH_SINK (object);
- GstObject *new_kid, *kid;
-
- GST_OBJECT_LOCK (sink);
- new_kid = GST_OBJECT_CAST (sink->new_kid);
- sink->new_kid = NULL;
-
- kid = GST_OBJECT_CAST (sink->kid);
- sink->kid = NULL;
- GST_OBJECT_UNLOCK (sink);
-
- gst_object_replace (&new_kid, NULL);
- gst_object_replace (&kid, NULL);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static gboolean
-gst_switch_sink_commit_new_kid (GstSwitchSink * sink)
-{
- GstPad *targetpad;
- GstState kid_state;
- GstElement *new_kid, *old_kid;
- gboolean is_fakesink = FALSE;
- GstBus *bus;
-
- /* need locking around member accesses */
- GST_OBJECT_LOCK (sink);
- /* If we're currently changing state, set the child to the next state
- * we're transitioning too, rather than our current state which is
- * about to change */
- if (GST_STATE_NEXT (sink) != GST_STATE_VOID_PENDING)
- kid_state = GST_STATE_NEXT (sink);
- else
- kid_state = GST_STATE (sink);
-
- new_kid = sink->new_kid ? gst_object_ref (sink->new_kid) : NULL;
- sink->new_kid = NULL;
- GST_OBJECT_UNLOCK (sink);
-
- /* Fakesink by default if NULL is passed as the new child */
- if (new_kid == NULL) {
- GST_DEBUG_OBJECT (sink, "Replacing kid with fakesink");
- new_kid = gst_element_factory_make ("fakesink", "testsink");
- if (new_kid == NULL) {
- GST_ERROR_OBJECT (sink, "Failed to create fakesink");
- return FALSE;
- }
- /* Add a reference, as it would if the element came from sink->new_kid */
- gst_object_ref (new_kid);
- g_object_set (new_kid, "sync", TRUE, NULL);
- is_fakesink = TRUE;
- } else {
- GST_DEBUG_OBJECT (sink, "Setting new kid");
- }
-
- /* set temporary bus of our own to catch error messages from the child
- * (could we just set our own bus on it, or would the state change messages
- * from the not-yet-added element confuse the state change algorithm? Let's
- * play it safe for now) */
- bus = gst_bus_new ();
- gst_element_set_bus (new_kid, bus);
- gst_object_unref (bus);
-
- if (gst_element_set_state (new_kid, kid_state) == GST_STATE_CHANGE_FAILURE) {
- GstMessage *msg;
-
- /* check if child posted an error message and if so re-post it on our bus
- * so that the application gets to see a decent error and not our generic
- * fallback error message which is completely indecipherable to the user */
- msg = gst_bus_pop_filtered (GST_ELEMENT_BUS (new_kid), GST_MESSAGE_ERROR);
- if (msg) {
- GST_INFO_OBJECT (sink, "Forwarding kid error: %" GST_PTR_FORMAT, msg);
- gst_element_post_message (GST_ELEMENT (sink), msg);
- }
- GST_ELEMENT_ERROR (sink, CORE, STATE_CHANGE, (NULL),
- ("Failed to set state on new child."));
- gst_element_set_bus (new_kid, NULL);
- gst_object_unref (new_kid);
- return FALSE;
- }
- gst_element_set_bus (new_kid, NULL);
- gst_bin_add (GST_BIN (sink), new_kid);
-
- /* Now, replace the existing child */
- GST_OBJECT_LOCK (sink);
- old_kid = sink->kid;
- sink->kid = new_kid;
- /* Mark whether a custom kid or fakesink has been installed */
- sink->have_kid = !is_fakesink;
- GST_OBJECT_UNLOCK (sink);
-
- /* kill old element */
- if (old_kid) {
- GST_DEBUG_OBJECT (sink, "Removing old kid %" GST_PTR_FORMAT, old_kid);
- gst_element_set_state (old_kid, GST_STATE_NULL);
- gst_bin_remove (GST_BIN (sink), old_kid);
- gst_object_unref (old_kid);
- /* Don't lose the SINK flag */
- GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_IS_SINK);
- }
-
- /* re-attach ghostpad */
- GST_DEBUG_OBJECT (sink, "Creating new ghostpad");
- targetpad = gst_element_get_static_pad (sink->kid, "sink");
- gst_ghost_pad_set_target (GST_GHOST_PAD (sink->pad), targetpad);
- gst_object_unref (targetpad);
- GST_DEBUG_OBJECT (sink, "done changing child of switchsink");
-
- /* FIXME: Push new-segment info and pre-roll buffer(s) into the kid */
-
- return TRUE;
-}
-
-gboolean
-gst_switch_sink_set_child (GstSwitchSink * sink, GstElement * new_kid)
-{
- GstState cur, next;
- GstElement **p_kid;
-
- /* Nothing to do if clearing the child and we've already installed fakesink */
- if (new_kid == NULL && sink->kid != NULL && sink->have_kid == FALSE)
- return TRUE;
-
- /* Store the new kid to be committed later */
- GST_OBJECT_LOCK (sink);
- cur = GST_STATE (sink);
- next = GST_STATE_NEXT (sink);
- p_kid = &sink->new_kid;
- gst_object_replace ((GstObject **) p_kid, (GstObject *) new_kid);
- GST_OBJECT_UNLOCK (sink);
- if (new_kid)
- gst_object_unref (new_kid);
-
- /* Sometime, it would be lovely to allow sink changes even when
- * already running, but this involves sending an appropriate new-segment
- * and possibly prerolling etc */
- /* FIXME: Block the pad and replace the kid when it completes */
- if (cur > GST_STATE_READY || next == GST_STATE_PAUSED) {
- GST_DEBUG_OBJECT (sink,
- "Switch-sink is already running. Ignoring change of child.");
- gst_object_unref (new_kid);
- return TRUE;
- }
-
- return gst_switch_sink_commit_new_kid (sink);
-}
-
-static GstStateChangeReturn
-gst_switch_sink_change_state (GstElement * element, GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstSwitchSink *sink = GST_SWITCH_SINK (element);
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- if (!gst_switch_sink_reset (sink))
- ret = GST_STATE_CHANGE_FAILURE;
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- * Copyright (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * Copyright (c) 2007 Jan Schmidt <thaytan@mad.scientist.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_SWITCH_SINK_H__
-#define __GST_SWITCH_SINK_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_SWITCH_SINK \
- (gst_switch_sink_get_type ())
-#define GST_SWITCH_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_SWITCH_SINK, \
- GstSwitchSink))
-#define GST_SWITCH_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_SWITCH_SINK, \
- GstSwitchSinkClass))
-#define GST_IS_SWITCH_SINK(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_SWITCH_SINK))
-#define GST_IS_SWITCH_SINK_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_SWITCH_SINK))
-
-typedef struct _GstSwitchSink {
- GstBin parent;
-
- GstElement *kid;
- GstElement *new_kid;
- GstPad *pad;
-
- /* If a custom child has been set... */
- gboolean have_kid;
-} GstSwitchSink;
-
-typedef struct _GstSwitchSinkClass {
- GstBinClass parent_class;
-} GstSwitchSinkClass;
-
-GType gst_switch_sink_get_type (void);
-
-gboolean gst_switch_sink_set_child (GstSwitchSink *ssink, GstElement *new_kid);
-
-G_END_DECLS
-
-#endif /* __GST_SWITCH_SINK_H__ */
+++ /dev/null
-/* GStreamer
- * Copyright (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * Copyright (c) 2006 Jürg Billeter <j@bitron.ch>
- * Copyright (c) 2007 Jan Schmidt <thaytan@noraisin.net>
- * Copyright (c) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <string.h>
-
-#include "gstswitchsrc.h"
-
-GST_DEBUG_CATEGORY_STATIC (switch_debug);
-#define GST_CAT_DEFAULT switch_debug
-
-static void gst_switch_src_dispose (GObject * object);
-static GstStateChangeReturn
-gst_switch_src_change_state (GstElement * element, GstStateChange transition);
-
-GST_BOILERPLATE (GstSwitchSrc, gst_switch_src, GstBin, GST_TYPE_BIN);
-
-static void
-gst_switch_src_base_init (gpointer klass)
-{
- GST_DEBUG_CATEGORY_INIT (switch_debug, "switchsrc", 0, "switchsrc element");
-}
-
-static void
-gst_switch_src_class_init (GstSwitchSrcClass * klass)
-{
- GObjectClass *oklass = G_OBJECT_CLASS (klass);
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
- static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS_ANY);
- GstPadTemplate *child_pad_templ;
-
- oklass->dispose = gst_switch_src_dispose;
- eklass->change_state = gst_switch_src_change_state;
-
- /* Provide a default pad template if the child didn't */
- child_pad_templ = gst_element_class_get_pad_template (eklass, "src");
- if (child_pad_templ == NULL) {
- gst_element_class_add_pad_template (eklass,
- gst_static_pad_template_get (&src_template));
- }
-}
-
-static gboolean
-gst_switch_src_reset (GstSwitchSrc * src)
-{
- /* this will install fakesrc if no other child has been set,
- * otherwise we rely on the subclass to know when to unset its
- * custom kid */
- if (src->kid == NULL) {
- return gst_switch_src_set_child (src, NULL);
- }
-
- return TRUE;
-}
-
-static void
-gst_switch_src_init (GstSwitchSrc * src, GstSwitchSrcClass * g_class)
-{
- GstElementClass *eklass = GST_ELEMENT_GET_CLASS (src);
- GstPadTemplate *templ;
-
- templ = gst_element_class_get_pad_template (eklass, "src");
- src->pad = gst_ghost_pad_new_no_target_from_template ("src", templ);
- gst_element_add_pad (GST_ELEMENT (src), src->pad);
-
- gst_switch_src_reset (src);
-
- GST_OBJECT_FLAG_SET (src, GST_ELEMENT_IS_SOURCE);
-}
-
-static void
-gst_switch_src_dispose (GObject * object)
-{
- GstSwitchSrc *src = GST_SWITCH_SRC (object);
- GstObject *new_kid, *kid;
-
- GST_OBJECT_LOCK (src);
- new_kid = GST_OBJECT_CAST (src->new_kid);
- src->new_kid = NULL;
-
- kid = GST_OBJECT_CAST (src->kid);
- src->kid = NULL;
- GST_OBJECT_UNLOCK (src);
-
- gst_object_replace (&new_kid, NULL);
- gst_object_replace (&kid, NULL);
-
- GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
-}
-
-static gboolean
-gst_switch_src_commit_new_kid (GstSwitchSrc * src)
-{
- GstPad *targetpad;
- GstState kid_state;
- GstElement *new_kid, *old_kid;
- gboolean is_fakesrc = FALSE;
- GstBus *bus;
-
- /* need locking around member accesses */
- GST_OBJECT_LOCK (src);
- /* If we're currently changing state, set the child to the next state
- * we're transitioning too, rather than our current state which is
- * about to change */
- if (GST_STATE_NEXT (src) != GST_STATE_VOID_PENDING)
- kid_state = GST_STATE_NEXT (src);
- else
- kid_state = GST_STATE (src);
-
- new_kid = src->new_kid ? gst_object_ref (src->new_kid) : NULL;
- src->new_kid = NULL;
- GST_OBJECT_UNLOCK (src);
-
- /* Fakesrc by default if NULL is passed as the new child */
- if (new_kid == NULL) {
- GST_DEBUG_OBJECT (src, "Replacing kid with fakesrc");
- new_kid = gst_element_factory_make ("fakesrc", "testsrc");
- if (new_kid == NULL) {
- GST_ERROR_OBJECT (src, "Failed to create fakesrc");
- return FALSE;
- }
- /* Add a reference, as it would if the element came from src->new_kid */
- gst_object_ref (new_kid);
- is_fakesrc = TRUE;
- } else {
- GST_DEBUG_OBJECT (src, "Setting new kid");
- }
-
- /* set temporary bus of our own to catch error messages from the child
- * (could we just set our own bus on it, or would the state change messages
- * from the not-yet-added element confuse the state change algorithm? Let's
- * play it safe for now) */
- bus = gst_bus_new ();
- gst_element_set_bus (new_kid, bus);
- gst_object_unref (bus);
-
- if (gst_element_set_state (new_kid, kid_state) == GST_STATE_CHANGE_FAILURE) {
- GstMessage *msg;
-
- /* check if child posted an error message and if so re-post it on our bus
- * so that the application gets to see a decent error and not our generic
- * fallback error message which is completely indecipherable to the user */
- msg = gst_bus_pop_filtered (GST_ELEMENT_BUS (new_kid), GST_MESSAGE_ERROR);
- if (msg) {
- GST_INFO_OBJECT (src, "Forwarding kid error: %" GST_PTR_FORMAT, msg);
- gst_element_post_message (GST_ELEMENT (src), msg);
- }
- GST_ELEMENT_ERROR (src, CORE, STATE_CHANGE, (NULL),
- ("Failed to set state on new child."));
- gst_element_set_bus (new_kid, NULL);
- gst_object_unref (new_kid);
- return FALSE;
- }
- gst_element_set_bus (new_kid, NULL);
- gst_bin_add (GST_BIN (src), new_kid);
-
- /* Now, replace the existing child */
- GST_OBJECT_LOCK (src);
- old_kid = src->kid;
- src->kid = new_kid;
- /* Mark whether a custom kid or fakesrc has been installed */
- src->have_kid = !is_fakesrc;
- GST_OBJECT_UNLOCK (src);
-
- /* kill old element */
- if (old_kid) {
- GST_DEBUG_OBJECT (src, "Removing old kid %" GST_PTR_FORMAT, old_kid);
- gst_element_set_state (old_kid, GST_STATE_NULL);
- gst_bin_remove (GST_BIN (src), old_kid);
- gst_object_unref (old_kid);
- /* Don't lose the SOURCE flag */
- GST_OBJECT_FLAG_SET (src, GST_ELEMENT_IS_SOURCE);
- }
-
- /* re-attach ghostpad */
- GST_DEBUG_OBJECT (src, "Creating new ghostpad");
- targetpad = gst_element_get_static_pad (src->kid, "src");
- gst_ghost_pad_set_target (GST_GHOST_PAD (src->pad), targetpad);
- gst_object_unref (targetpad);
- GST_DEBUG_OBJECT (src, "done changing child of switchsrc");
-
- return TRUE;
-}
-
-gboolean
-gst_switch_src_set_child (GstSwitchSrc * src, GstElement * new_kid)
-{
- GstState cur, next;
- GstElement **p_kid;
-
- /* Nothing to do if clearing the child and we've already installed fakesrc */
- if (new_kid == NULL && src->kid != NULL && src->have_kid == FALSE)
- return TRUE;
-
- /* Store the new kid to be committed later */
- GST_OBJECT_LOCK (src);
- cur = GST_STATE (src);
- next = GST_STATE_NEXT (src);
- p_kid = &src->new_kid;
- gst_object_replace ((GstObject **) p_kid, (GstObject *) new_kid);
- GST_OBJECT_UNLOCK (src);
- if (new_kid)
- gst_object_unref (new_kid);
-
- /* Sometime, it would be lovely to allow src changes even when
- * already running */
- /* FIXME: Block the pad and replace the kid when it completes */
- if (cur > GST_STATE_READY || next == GST_STATE_PAUSED) {
- GST_DEBUG_OBJECT (src,
- "Switch-src is already running. Ignoring change of child.");
- gst_object_unref (new_kid);
- return TRUE;
- }
-
- return gst_switch_src_commit_new_kid (src);
-}
-
-static GstStateChangeReturn
-gst_switch_src_change_state (GstElement * element, GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- GstSwitchSrc *src = GST_SWITCH_SRC (element);
-
- ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
- (element, transition), GST_STATE_CHANGE_SUCCESS);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- if (!gst_switch_src_reset (src))
- ret = GST_STATE_CHANGE_FAILURE;
- break;
- default:
- break;
- }
-
- return ret;
-}
+++ /dev/null
-/* GStreamer
- *
- * Copyright (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
- * Copyright (c) 2005 Tim-Philipp Müller <tim centricular net>
- * Copyright (c) 2007 Jan Schmidt <thaytan@mad.scientist.com>
- * Copyright (c) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_SWITCH_SRC_H__
-#define __GST_SWITCH_SRC_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_SWITCH_SRC (gst_switch_src_get_type ())
-#define GST_SWITCH_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_SWITCH_SRC, GstSwitchSrc))
-#define GST_SWITCH_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_SWITCH_SRC, GstSwitchSrcClass))
-#define GST_IS_SWITCH_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_SWITCH_SRC))
-#define GST_IS_SWITCH_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_SWITCH_SRC))
-
-typedef struct _GstSwitchSrc {
- GstBin parent;
-
- GstElement *kid;
- GstElement *new_kid;
- GstPad *pad;
-
- /* If a custom child has been set... */
- gboolean have_kid;
-} GstSwitchSrc;
-
-typedef struct _GstSwitchSrcClass {
- GstBinClass parent_class;
-} GstSwitchSrcClass;
-
-GType gst_switch_src_get_type (void);
-gboolean gst_switch_src_set_child (GstSwitchSrc *ssrc, GstElement *new_kid);
-
-G_END_DECLS
-
-#endif /* __GST_SWITCH_SRC_H__ */
buffer_size, spec->segsize, spec->segtotal);
/* allocate the ringbuffer memory now */
- buf->data = gst_buffer_new_and_alloc (spec->segtotal * spec->segsize);
- memset (GST_BUFFER_DATA (buf->data), 0, GST_BUFFER_SIZE (buf->data));
+ buf->size = spec->segtotal * spec->segsize;
+ buf->memory = g_malloc0 (buf->size);
if ((res = gst_jack_audio_client_set_active (sink->client, TRUE)))
goto could_not_activate;
abuf->sample_rate = -1;
/* free the buffer */
- gst_buffer_unref (buf->data);
- buf->data = NULL;
+ g_free (buf->memory);
+ buf->memory = NULL;
return TRUE;
}
PROP_LAST
};
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_jack_audio_sink_debug, "jacksink", 0, "jacksink element");
-
-GST_BOILERPLATE_FULL (GstJackAudioSink, gst_jack_audio_sink, GstBaseAudioSink,
- GST_TYPE_BASE_AUDIO_SINK, _do_init);
+#define gst_jack_audio_sink_parent_class parent_class
+G_DEFINE_TYPE (GstJackAudioSink, gst_jack_audio_sink, GST_TYPE_BASE_AUDIO_SINK);
static void gst_jack_audio_sink_dispose (GObject * object);
static void gst_jack_audio_sink_set_property (GObject * object, guint prop_id,
static void gst_jack_audio_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstCaps *gst_jack_audio_sink_getcaps (GstBaseSink * bsink);
+static GstCaps *gst_jack_audio_sink_getcaps (GstBaseSink * bsink,
+ GstCaps * filter);
static GstRingBuffer *gst_jack_audio_sink_create_ringbuffer (GstBaseAudioSink *
sink);
static void
-gst_jack_audio_sink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Audio Sink (Jack)",
- "Sink/Audio", "Output audio to a JACK server",
- "Wim Taymans <wim.taymans@gmail.com>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&jackaudiosink_sink_factory));
-}
-
-static void
gst_jack_audio_sink_class_init (GstJackAudioSinkClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseSinkClass *gstbasesink_class;
GstBaseAudioSinkClass *gstbaseaudiosink_class;
+ GST_DEBUG_CATEGORY_INIT (gst_jack_audio_sink_debug, "jacksink", 0,
+ "jacksink element");
+
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasesink_class = (GstBaseSinkClass *) klass;
gstbaseaudiosink_class = (GstBaseAudioSinkClass *) klass;
GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Audio Sink (Jack)",
+ "Sink/Audio", "Output audio to a JACK server",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&jackaudiosink_sink_factory));
+
gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_jack_audio_sink_getcaps);
gstbaseaudiosink_class->create_ringbuffer =
}
static void
-gst_jack_audio_sink_init (GstJackAudioSink * sink,
- GstJackAudioSinkClass * g_class)
+gst_jack_audio_sink_init (GstJackAudioSink * sink)
{
sink->connect = DEFAULT_PROP_CONNECT;
sink->server = g_strdup (DEFAULT_PROP_SERVER);
}
static GstCaps *
-gst_jack_audio_sink_getcaps (GstBaseSink * bsink)
+gst_jack_audio_sink_getcaps (GstBaseSink * bsink, GstCaps * filter)
{
GstJackAudioSink *sink = GST_JACK_AUDIO_SINK (bsink);
const char **ports;
buffer_size, spec->segsize, spec->segtotal);
/* allocate the ringbuffer memory now */
- buf->data = gst_buffer_new_and_alloc (spec->segtotal * spec->segsize);
- memset (GST_BUFFER_DATA (buf->data), 0, GST_BUFFER_SIZE (buf->data));
+ buf->size = spec->segtotal * spec->segsize;
+ buf->memory = g_malloc0 (buf->size);
if ((res = gst_jack_audio_client_set_active (src->client, TRUE)))
goto could_not_activate;
abuf->sample_rate = -1;
/* free the buffer */
- gst_buffer_unref (buf->data);
- buf->data = NULL;
+ g_free (buf->memory);
+ buf->memory = NULL;
return TRUE;
}
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
);
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT(gst_jack_audio_src_debug, "jacksrc", 0, "jacksrc element");
-
-GST_BOILERPLATE_FULL (GstJackAudioSrc, gst_jack_audio_src, GstBaseAudioSrc,
- GST_TYPE_BASE_AUDIO_SRC, _do_init);
+#define gst_jack_audio_src_parent_class parent_class
+G_DEFINE_TYPE (GstJackAudioSrc, gst_jack_audio_src, GST_TYPE_BASE_AUDIO_SRC);
static void gst_jack_audio_src_dispose (GObject * object);
static void gst_jack_audio_src_set_property (GObject * object, guint prop_id,
static void gst_jack_audio_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstCaps *gst_jack_audio_src_getcaps (GstBaseSrc * bsrc);
+static GstCaps *gst_jack_audio_src_getcaps (GstBaseSrc * bsrc,
+ GstCaps * filter);
static GstRingBuffer *gst_jack_audio_src_create_ringbuffer (GstBaseAudioSrc *
src);
/* GObject vmethod implementations */
-static void
-gst_jack_audio_src_base_init (gpointer gclass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_set_details_simple (element_class, "Audio Source (Jack)",
- "Source/Audio", "Captures audio from a JACK server",
- "Tristan Matthews <tristan@sat.qc.ca>");
-}
-
/* initialize the jack_audio_src's class */
static void
gst_jack_audio_src_class_init (GstJackAudioSrcClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseSrcClass *gstbasesrc_class;
GstBaseAudioSrcClass *gstbaseaudiosrc_class;
- gobject_class = (GObjectClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_jack_audio_src_debug, "jacksrc", 0,
+ "jacksrc element");
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasesrc_class = (GstBaseSrcClass *) klass;
gstbaseaudiosrc_class = (GstBaseAudioSrcClass *) klass;
GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "Audio Source (Jack)",
+ "Source/Audio", "Captures audio from a JACK server",
+ "Tristan Matthews <tristan@sat.qc.ca>");
+
gstbasesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_jack_audio_src_getcaps);
gstbaseaudiosrc_class->create_ringbuffer =
GST_DEBUG_FUNCPTR (gst_jack_audio_src_create_ringbuffer);
* initialize instance structure
*/
static void
-gst_jack_audio_src_init (GstJackAudioSrc * src, GstJackAudioSrcClass * gclass)
+gst_jack_audio_src_init (GstJackAudioSrc * src)
{
//gst_base_src_set_live(GST_BASE_SRC (src), TRUE);
src->connect = DEFAULT_PROP_CONNECT;
}
static GstCaps *
-gst_jack_audio_src_getcaps (GstBaseSrc * bsrc)
+gst_jack_audio_src_getcaps (GstBaseSrc * bsrc, GstCaps * filter)
{
GstJackAudioSrc *src = GST_JACK_AUDIO_SRC (bsrc);
const char **ports;
GST_OBJECT_LOCK (dec);
dec->proportion = proportion;
if (G_LIKELY (ts != GST_CLOCK_TIME_NONE)) {
- if (G_UNLIKELY (diff > 0))
+ if (G_UNLIKELY (diff > dec->qos_duration))
dec->earliest_time = ts + 2 * diff + dec->qos_duration;
else
dec->earliest_time = ts + diff;
static GstStateChangeReturn gst_pulsemixer_change_state (GstElement * element,
GstStateChange transition);
-static void gst_pulsemixer_init_interfaces (GType type);
-
GST_IMPLEMENT_PULSEMIXER_CTRL_METHODS (GstPulseMixer, gst_pulsemixer);
GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseMixer, gst_pulsemixer);
-GST_BOILERPLATE_FULL (GstPulseMixer, gst_pulsemixer, GstElement,
- GST_TYPE_ELEMENT, gst_pulsemixer_init_interfaces);
-
-static gboolean
-gst_pulsemixer_interface_supported (GstImplementsInterface
- * iface, GType interface_type)
-{
- GstPulseMixer *this = GST_PULSEMIXER (iface);
-
- if (interface_type == GST_TYPE_MIXER && this->mixer)
- return TRUE;
-
- if (interface_type == GST_TYPE_PROPERTY_PROBE && this->probe)
- return TRUE;
-
- return FALSE;
-}
-static void
-gst_pulsemixer_implements_interface_init (GstImplementsInterfaceClass * klass)
-{
- klass->supported = gst_pulsemixer_interface_supported;
-}
-
-static void
-gst_pulsemixer_init_interfaces (GType type)
-{
- static const GInterfaceInfo implements_iface_info = {
- (GInterfaceInitFunc) gst_pulsemixer_implements_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo mixer_iface_info = {
- (GInterfaceInitFunc) gst_pulsemixer_mixer_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo probe_iface_info = {
- (GInterfaceInitFunc) gst_pulsemixer_property_probe_interface_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (type, GST_TYPE_IMPLEMENTS_INTERFACE,
- &implements_iface_info);
- g_type_add_interface_static (type, GST_TYPE_MIXER, &mixer_iface_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &probe_iface_info);
-}
-static void
-gst_pulsemixer_base_init (gpointer g_class)
-{
- gst_element_class_set_details_simple (GST_ELEMENT_CLASS (g_class),
- "PulseAudio Mixer",
- "Generic/Audio",
- "Control sound input and output levels for PulseAudio",
- "Lennart Poettering");
-}
+#define gst_pulsemixer_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPulseMixer, gst_pulsemixer, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_MIXER, gst_pulsemixer_mixer_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_PROPERTY_PROBE,
+ gst_pulsemixer_property_probe_interface_init));
static void
gst_pulsemixer_class_init (GstPulseMixerClass * g_class)
g_param_spec_string ("device-name", "Device name",
"Human-readable name of the sound device", NULL,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_details_simple (GST_ELEMENT_CLASS (g_class),
+ "PulseAudio Mixer",
+ "Generic/Audio",
+ "Control sound input and output levels for PulseAudio",
+ "Lennart Poettering");
}
static void
-gst_pulsemixer_init (GstPulseMixer * this, GstPulseMixerClass * g_class)
+gst_pulsemixer_init (GstPulseMixer * this)
{
this->mixer = NULL;
this->server = NULL;
/* EOS needs running clock */
if (GST_BASE_SINK_CAST (psink)->eos ||
- g_atomic_int_get (&GST_BASE_AUDIO_SINK (psink)->abidata.ABI.
- eos_rendering))
+ g_atomic_int_get (&GST_BASE_AUDIO_SINK (psink)->eos_rendering))
gst_pulsering_set_corked (pbuf, FALSE, FALSE);
pa_threaded_mainloop_unlock (mainloop);
if (G_UNLIKELY (g_atomic_int_get (&buf->state) !=
GST_RING_BUFFER_STATE_STARTED)) {
/* see if we are allowed to start it */
- if (G_UNLIKELY (g_atomic_int_get (&buf->abidata.ABI.may_start) == FALSE))
+ if (G_UNLIKELY (g_atomic_int_get (&buf->may_start) == FALSE))
goto no_start;
GST_DEBUG_OBJECT (buf, "start!");
static GstStateChangeReturn gst_pulsesink_change_state (GstElement * element,
GstStateChange transition);
-static void gst_pulsesink_init_interfaces (GType type);
-
#if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
# define ENDIANNESS "LITTLE_ENDIAN, BIG_ENDIAN"
#else
# define ENDIANNESS "BIG_ENDIAN, LITTLE_ENDIAN"
#endif
+static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 16, "
+ "depth = (int) 16, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-float, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "width = (int) 32, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 32, "
+ "depth = (int) 32, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 32 ];"
+#ifdef HAVE_PULSE_0_9_15
+ "audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 24, "
+ "depth = (int) 24, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 32, "
+ "depth = (int) 24, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 32 ];"
+#endif
+ "audio/x-raw-int, "
+ "signed = (boolean) FALSE, "
+ "width = (int) 8, "
+ "depth = (int) 8, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-alaw, "
+ "rate = (int) [ 1, MAX], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-mulaw, "
+ "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ]")
+ );
+
GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseSink, gst_pulsesink);
#define _do_init(type) \
gst_pulsesink_init_contexts (); \
gst_pulsesink_init_interfaces (type);
-GST_BOILERPLATE_FULL (GstPulseSink, gst_pulsesink, GstBaseAudioSink,
- GST_TYPE_BASE_AUDIO_SINK, _do_init);
-
-static gboolean
-gst_pulsesink_interface_supported (GstImplementsInterface *
- iface, GType interface_type)
-{
- GstPulseSink *this = GST_PULSESINK_CAST (iface);
-
- if (interface_type == GST_TYPE_PROPERTY_PROBE && this->probe)
- return TRUE;
- if (interface_type == GST_TYPE_STREAM_VOLUME)
- return TRUE;
-
- return FALSE;
-}
-
-static void
-gst_pulsesink_implements_interface_init (GstImplementsInterfaceClass * klass)
-{
- klass->supported = gst_pulsesink_interface_supported;
-}
-
-static void
-gst_pulsesink_init_interfaces (GType type)
-{
- static const GInterfaceInfo implements_iface_info = {
- (GInterfaceInitFunc) gst_pulsesink_implements_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo probe_iface_info = {
- (GInterfaceInitFunc) gst_pulsesink_property_probe_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo svol_iface_info = {
- NULL, NULL, NULL
- };
-
- g_type_add_interface_static (type, GST_TYPE_STREAM_VOLUME, &svol_iface_info);
- g_type_add_interface_static (type, GST_TYPE_IMPLEMENTS_INTERFACE,
- &implements_iface_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &probe_iface_info);
-}
-
-static void
-gst_pulsesink_base_init (gpointer g_class)
-{
- static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-float, "
- "endianness = (int) { " ENDIANNESS " }, "
- "width = (int) 32, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 32, "
- "depth = (int) 32, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 24, "
- "depth = (int) 24, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 32, "
- "depth = (int) 24, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "signed = (boolean) FALSE, "
- "width = (int) 8, "
- "depth = (int) 8, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-alaw, "
- "rate = (int) [ 1, MAX], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-mulaw, "
- "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ]")
- );
-
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "PulseAudio Audio Sink",
- "Sink/Audio", "Plays audio to a PulseAudio server", "Lennart Poettering");
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&pad_template));
-}
+#define gst_pulsesink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPulseSink, gst_pulsesink, GST_TYPE_BASE_AUDIO_SINK,
+ gst_pulsesink_init_contexts ();
+ G_IMPLEMENT_INTERFACE (GST_TYPE_PROPERTY_PROBE,
+ gst_pulsesink_property_probe_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL)
+ );
static GstRingBuffer *
gst_pulsesink_create_ringbuffer (GstBaseAudioSink * sink)
g_param_spec_boxed ("stream-properties", "stream properties",
"list of pulseaudio stream properties",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "PulseAudio Audio Sink",
+ "Sink/Audio", "Plays audio to a PulseAudio server", "Lennart Poettering");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&pad_template));
}
/* returns the current time of the sink ringbuffer */
}
static void
-gst_pulsesink_init (GstPulseSink * pulsesink, GstPulseSinkClass * klass)
+gst_pulsesink_init (GstPulseSink * pulsesink)
{
pulsesink->server = NULL;
pulsesink->device = NULL;
static GstStateChangeReturn gst_pulsesrc_change_state (GstElement *
element, GstStateChange transition);
-static void gst_pulsesrc_init_interfaces (GType type);
-
#if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
# define ENDIANNESS "LITTLE_ENDIAN, BIG_ENDIAN"
#else
# define ENDIANNESS "BIG_ENDIAN, LITTLE_ENDIAN"
#endif
-GST_IMPLEMENT_PULSEMIXER_CTRL_METHODS (GstPulseSrc, gst_pulsesrc);
-GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseSrc, gst_pulsesrc);
-GST_BOILERPLATE_FULL (GstPulseSrc, gst_pulsesrc, GstAudioSrc,
- GST_TYPE_AUDIO_SRC, gst_pulsesrc_init_interfaces);
-
-static gboolean
-gst_pulsesrc_interface_supported (GstImplementsInterface *
- iface, GType interface_type)
-{
- GstPulseSrc *this = GST_PULSESRC_CAST (iface);
-
- if (interface_type == GST_TYPE_MIXER && this->mixer)
- return TRUE;
-
- if (interface_type == GST_TYPE_PROPERTY_PROBE && this->probe)
- return TRUE;
-
- return FALSE;
-}
-
-static void
-gst_pulsesrc_implements_interface_init (GstImplementsInterfaceClass * klass)
-{
- klass->supported = gst_pulsesrc_interface_supported;
-}
+static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 16, "
+ "depth = (int) 16, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-float, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "width = (int) 32, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 32, "
+ "depth = (int) 32, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-int, "
+ "signed = (boolean) FALSE, "
+ "width = (int) 8, "
+ "depth = (int) 8, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-alaw, "
+ "rate = (int) [ 1, MAX], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-mulaw, "
+ "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ]")
+ );
-static void
-gst_pulsesrc_init_interfaces (GType type)
-{
- static const GInterfaceInfo implements_iface_info = {
- (GInterfaceInitFunc) gst_pulsesrc_implements_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo mixer_iface_info = {
- (GInterfaceInitFunc) gst_pulsesrc_mixer_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo probe_iface_info = {
- (GInterfaceInitFunc) gst_pulsesrc_property_probe_interface_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (type, GST_TYPE_IMPLEMENTS_INTERFACE,
- &implements_iface_info);
- g_type_add_interface_static (type, GST_TYPE_MIXER, &mixer_iface_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &probe_iface_info);
-}
-static void
-gst_pulsesrc_base_init (gpointer g_class)
-{
+GST_IMPLEMENT_PULSEMIXER_CTRL_METHODS (GstPulseSrc, gst_pulsesrc);
+GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseSrc, gst_pulsesrc);
- static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-float, "
- "endianness = (int) { " ENDIANNESS " }, "
- "width = (int) 32, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 32, "
- "depth = (int) 32, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "signed = (boolean) FALSE, "
- "width = (int) 8, "
- "depth = (int) 8, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-alaw, "
- "rate = (int) [ 1, MAX], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-mulaw, "
- "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ]")
- );
-
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "PulseAudio Audio Source",
- "Source/Audio",
- "Captures audio from a PulseAudio server", "Lennart Poettering");
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&pad_template));
-}
+#define gst_pulsesrc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPulseSrc, gst_pulsesrc, GST_TYPE_AUDIO_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_MIXER, gst_pulsesrc_mixer_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_PROPERTY_PROBE,
+ gst_pulsesrc_property_probe_interface_init));
static void
gst_pulsesrc_class_init (GstPulseSrcClass * klass)
g_param_spec_boxed ("stream-properties", "stream properties",
"list of pulseaudio stream properties",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "PulseAudio Audio Source",
+ "Source/Audio",
+ "Captures audio from a PulseAudio server", "Lennart Poettering");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&pad_template));
}
static void
-gst_pulsesrc_init (GstPulseSrc * pulsesrc, GstPulseSrcClass * klass)
+gst_pulsesrc_init (GstPulseSrc * pulsesrc)
{
pulsesrc->server = NULL;
pulsesrc->device = NULL;
gboolean result = FALSE;
/* first see what is possible on our source pad */
- thiscaps = gst_pad_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
+ thiscaps = gst_pad_get_caps (GST_BASE_SRC_PAD (basesrc), NULL);
GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
/* nothing or anything is allowed, we're done */
if (thiscaps == NULL || gst_caps_is_any (thiscaps))
goto no_nego_needed;
/* get the peer caps */
- peercaps = gst_pad_peer_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
+ peercaps = gst_pad_peer_get_caps (GST_BASE_SRC_PAD (basesrc), NULL);
GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
if (peercaps) {
/* get intersection */
{
GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (userdata);
- pulsesrc->operation_success = ! !success;
+ pulsesrc->operation_success = !!success;
pa_threaded_mainloop_signal (pulsesrc->mainloop, 0);
}
GstBuffer *buf;
val = gst_tag_list_get_value_index (list, tag, i);
- buf = (GstBuffer *) gst_value_get_mini_object (val);
+ buf = (GstBuffer *) g_value_get_boxed (val);
if (buf && GST_BUFFER_CAPS (buf)) {
GstStructure *s;
GST_DEBUG ("image %u/%u", n + 1, num_tags);
val = gst_tag_list_get_value_index (list, tag, n);
- image = (GstBuffer *) gst_value_get_mini_object (val);
+ image = (GstBuffer *) g_value_get_boxed (val);
if (GST_IS_BUFFER (image) && GST_BUFFER_SIZE (image) > 0 &&
GST_BUFFER_CAPS (image) != NULL &&
+++ /dev/null
-gstreamer-*.schemas
-gstreamer.schemas
+++ /dev/null
-GST_SCHEMA_FILES = gstreamer-@GST_MAJORMINOR@.schemas
-
-if USE_GCONF
-schemadir = @GCONF_SCHEMA_FILE_DIR@
-schema_DATA = $(GST_SCHEMA_FILES)
-endif
-
-gstreamer-@GST_MAJORMINOR@.schemas: gstreamer.schemas
- cp gstreamer.schemas gstreamer-@GST_MAJORMINOR@.schemas
-
-if USE_GCONF
-if GCONF_SCHEMAS_INSTALL
-install-data-local:
- @GCONF_CONFIG_SOURCE=$(GCONF_SCHEMA_CONFIG_SOURCE) $(GCONFTOOL) \
- --makefile-install-rule $(builddir)/$(schema_DATA) || \
- (echo ;\
- echo "*****************************************************"; \
- echo "Installation of schemas failed, install them manually"; \
- echo "*****************************************************";)
- @true
-else
-install-data-local:
- @echo "***************************************************************"
- @echo "Not installing schemas, disabled with --disable-schemas-install"
- @echo "***************************************************************"
- @true
-endif
-endif # USE_GCONF
-
-CLEANFILES = $(GST_SCHEMA_FILES)
-EXTRA_DIST = $(GST_SCHEMA_FILES)
+++ /dev/null
-<gconfschemafile>
- <schemalist>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSINK@</default>
- <locale name="C">
- <short>default GStreamer audiosink</short>
- <long>GStreamer can play audio using any number of output elements. Some possible choices are osssink, esdsink and alsasink. The audiosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for default GStreamer audiosink</short>
- <long>Describes the selected output element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSINK@</default>
- <locale name="C">
- <short>GStreamer audiosink for Music and Movies</short>
- <long>GStreamer can play audio using any number of output elements. Some possible choices are osssink, esdsink and alsasink. The audiosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for GStreamer audiosink for Music and Movies</short>
- <long>Describes the selected output element for Music and Movies.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSINK@</default>
- <locale name="C">
- <short>GStreamer audiosink for Audio/Video Conferencing</short>
- <long>GStreamer can play audio using any number of output elements. Some possible choices are osssink, esdsink and alsasink. The audiosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for GStreamer audiosink for Audio/Video Conferencing</short>
- <long>Describes the selected output element for Audio/Video Conferencing.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for default GStreamer audiosink</short>
- <long>Describes the selected output element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSINK@</default>
- <locale name="C">
- <short>GStreamer audiosink for Music and Movies</short>
- <long>GStreamer can play audio using any number of output elements. Some possible choices are osssink, esdsink and alsasink. The audiosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/musicaudiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for GStreamer audiosink for Music and Movies</short>
- <long>Describes the selected output element for Music and Movies.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSINK@</default>
- <locale name="C">
- <short>GStreamer audiosink for Audio/Video Conferencing</short>
- <long>GStreamer can play audio using any number of output elements. Some possible choices are osssink, esdsink and alsasink. The audiosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/chataudiosink_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for GStreamer audiosink for Audio/Video Conferencing</short>
- <long>Describes the selected output element for Audio/Video Conferencing.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/videosink</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/videosink</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_VIDEOSINK@</default>
- <locale name="C">
- <short>default GStreamer videosink</short>
- <long>GStreamer can play video using any number of output elements. Some possible choices are xvimagesink, ximagesink, sdlvideosink and aasink. The videosink can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_AUDIOSRC@</default>
- <locale name="C">
- <short>default GStreamer audio source</short>
- <long>GStreamer can record audio using any number of input elements. Some possible choices are osssrc, esdsrc and alsasrc. The audio source can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for default GStreamer audiosrc</short>
- <long>Describes the selected input element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc_description</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/audiosrc_description</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>Default</default>
- <locale name="C">
- <short>description for default GStreamer audiosrc</short>
- <long>Describes the selected input element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/videosrc</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/videosrc</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_VIDEOSRC@</default>
- <locale name="C">
- <short>default GStreamer video source</short>
- <long>GStreamer can record video from any number of input elements. Some possible choices are v4lsrc and videotestsrc. The video source can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- <schema>
- <key>/schemas/system/gstreamer/@GST_MAJORMINOR@/default/visualization</key>
- <applyto>/system/gstreamer/@GST_MAJORMINOR@/default/visualization</applyto>
- <owner>gstreamer</owner>
- <type>string</type>
- <default>@DEFAULT_VISUALIZER@</default>
- <locale name="C">
- <short>default GStreamer visualization plugin</short>
- <long>GStreamer can put visualization plugins in a pipeline to transform audio stream in video frames. Default is goom but more visualization plugins will be ported soon. The visualization plugin can be a partial pipeline instead of just one element.</long>
- </locale>
- </schema>
- </schemalist>
-</gconfschemafile>
BuildRequires: gcc-c++
-@USE_ESD_TRUE@BuildRequires: esound-devel >= 0.2.8
-@USE_ESD_TRUE@Obsoletes: gstreamer-esd
-@USE_ESD_TRUE@
-@USE_ESD_TRUE@Provides: gstreamer-audiosrc
-@USE_ESD_TRUE@Provides: gstreamer-audiosink
@USE_FLAC_TRUE@BuildRequires: flac-devel >= 1.0.3
-@USE_GCONF_TRUE@BuildRequires: GConf2-devel
@USE_JPEG_TRUE@BuildRequires: libjpeg-devel
@USE_LIBCACA_TRUE@BuildRequires: libcaca-devel
@USE_LIBDV_TRUE@BuildRequires: libdv-devel
%install
rm -rf $RPM_BUILD_ROOT
-export GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL=1
%makeinstall
-unset GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL
# Clean out files that should not be part of the rpm.
rm -f $RPM_BUILD_ROOT%{_libdir}/gstreamer-%{majorminor}/*.la
rm -rf $RPM_BUILD_ROOT
%post
-@USE_GCONF_TRUE@export GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source`
-@USE_GCONF_TRUE@gconftool-2 --makefile-install-rule %{_sysconfdir}/gconf/schemas/gstreamer-%{majorminor}.schemas > /dev/null
%files -f gst-plugins-good-%{majorminor}.lang
%defattr(-, root, root)
# gstreamer-plugins with external dependencies but in the main package
@USE_LIBCACA_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstcacasink.so
-@USE_ESD_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstesd.so
@USE_FLAC_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstflac.so
@USE_JACK_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstjack.so
@USE_JPEG_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstjpeg.so
@USE_LIBPNG_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstpng.so
@USE_OSS_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstossaudio.so
@USE_SPEEX_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstspeex.so
-@USE_GCONF_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstgconfelements.so
@USE_HAL_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgsthalelements.so
@USE_SHOUT2_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstshout2.so
@USE_AALIB_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstaasink.so
@USE_SOUP_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstsouphttpsrc.so
@USE_PULSE_TRUE@%{_libdir}/gstreamer-%{majorminor}/libgstpulse.so
-# schema files
-@USE_GCONF_TRUE@%{_sysconfdir}/gconf/schemas/gstreamer-%{majorminor}.schemas
-
%changelog
* Tue Jun 12 2007 Jan Schmidt <jan at fluendo dot com>
- wavpack and qtdemux have moved from bad
* Sample pipeline:
* |[
* gst-launch videotestsrc pattern=smpte75 ! alpha method=green ! \
- * videomixer name=mixer ! ffmpegcolorspace ! autovideosink \
+ * videomixer name=mixer ! videoconvert ! autovideosink \
* videotestsrc pattern=snow ! mixer.
* ]| This pipeline adds a alpha channel to the SMPTE color bars
* with green as the transparent color and mixes the output with
};
static GstStaticPadTemplate gst_alpha_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA
- ";" GST_VIDEO_CAPS_YUV ("Y444")
- ";" GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR
- ";" GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR
- ";" GST_VIDEO_CAPS_YUV ("Y42B") ";" GST_VIDEO_CAPS_YUV ("YUY2")
- ";" GST_VIDEO_CAPS_YUV ("YVYU") ";" GST_VIDEO_CAPS_YUV ("UYVY")
- ";" GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12")
- ";" GST_VIDEO_CAPS_YUV ("Y41B"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, BGRx, xBGR, "
+ "RGBx, RGB, BGR, Y42B, YUY2, YVYU, UYVY, I420, YV12, Y41B } "))
);
static GstStaticPadTemplate gst_alpha_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV")
- ";" GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";" GST_VIDEO_CAPS_ABGR
- ";" GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_YUV ("Y444")
- ";" GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR
- ";" GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR
- ";" GST_VIDEO_CAPS_YUV ("Y42B") ";" GST_VIDEO_CAPS_YUV ("YUY2")
- ";" GST_VIDEO_CAPS_YUV ("YVYU") ";" GST_VIDEO_CAPS_YUV ("UYVY")
- ";" GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12")
- ";" GST_VIDEO_CAPS_YUV ("Y41B")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, BGRx, xBGR, "
+ "RGBx, RGB, BGR, Y42B, YUY2, YVYU, UYVY, I420, YV12, " "Y41B } "))
);
static GstStaticCaps gst_alpha_alpha_caps =
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV")
- ";" GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";" GST_VIDEO_CAPS_ABGR ";"
- GST_VIDEO_CAPS_RGBA);
+GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }"));
#define GST_ALPHA_LOCK(alpha) G_STMT_START { \
GST_LOG_OBJECT (alpha, "Locking alpha from thread %p", g_thread_self ()); \
static gboolean gst_alpha_start (GstBaseTransform * trans);
static gboolean gst_alpha_get_unit_size (GstBaseTransform * btrans,
- GstCaps * caps, guint * size);
+ GstCaps * caps, gsize * size);
static GstCaps *gst_alpha_transform_caps (GstBaseTransform * btrans,
- GstPadDirection direction, GstCaps * caps);
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static gboolean gst_alpha_set_caps (GstBaseTransform * btrans,
GstCaps * incaps, GstCaps * outcaps);
static GstFlowReturn gst_alpha_transform (GstBaseTransform * btrans,
GValue * value, GParamSpec * pspec);
static void gst_alpha_finalize (GObject * object);
-GST_BOILERPLATE (GstAlpha, gst_alpha, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
+#define gst_alpha_parent_class parent_class
+G_DEFINE_TYPE (GstAlpha, gst_alpha, GST_TYPE_VIDEO_FILTER);
#define GST_TYPE_ALPHA_METHOD (gst_alpha_method_get_type())
static GType
}
static void
-gst_alpha_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Alpha filter",
- "Filter/Effect/Video",
- "Adds an alpha channel to video - uniform or via chroma-keying",
- "Wim Taymans <wim@fluendo.com>\n"
- "Edward Hervey <edward.hervey@collabora.co.uk>\n"
- "Jan Schmidt <thaytan@noraisin.net>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_alpha_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_alpha_src_template));
-
- GST_DEBUG_CATEGORY_INIT (gst_alpha_debug, "alpha", 0,
- "alpha - Element for adding alpha channel to streams");
-}
-
-static void
gst_alpha_class_init (GstAlphaClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *btrans_class = (GstBaseTransformClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_alpha_debug, "alpha", 0,
+ "alpha - Element for adding alpha channel to streams");
+
gobject_class->set_property = gst_alpha_set_property;
gobject_class->get_property = gst_alpha_get_property;
gobject_class->finalize = gst_alpha_finalize;
DEFAULT_PREFER_PASSTHROUGH,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Alpha filter",
+ "Filter/Effect/Video",
+ "Adds an alpha channel to video - uniform or via chroma-keying",
+ "Wim Taymans <wim.taymans@gmail.com>\n"
+ "Edward Hervey <edward.hervey@collabora.co.uk>\n"
+ "Jan Schmidt <thaytan@noraisin.net>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_alpha_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_alpha_src_template));
+
btrans_class->start = GST_DEBUG_FUNCPTR (gst_alpha_start);
btrans_class->transform = GST_DEBUG_FUNCPTR (gst_alpha_transform);
btrans_class->before_transform =
}
static void
-gst_alpha_init (GstAlpha * alpha, GstAlphaClass * klass)
+gst_alpha_init (GstAlpha * alpha)
{
alpha->alpha = DEFAULT_ALPHA;
alpha->method = DEFAULT_METHOD;
case PROP_PREFER_PASSTHROUGH:{
gboolean prefer_passthrough = g_value_get_boolean (value);
- reconfigure = ((! !prefer_passthrough) != (! !alpha->prefer_passthrough))
+ reconfigure = ((!!prefer_passthrough) != (!!alpha->prefer_passthrough))
&& (alpha->method == ALPHA_METHOD_SET) && (alpha->alpha == 1.0);
alpha->prefer_passthrough = prefer_passthrough;
break;
static gboolean
gst_alpha_get_unit_size (GstBaseTransform * btrans,
- GstCaps * caps, guint * size)
+ GstCaps * caps, gsize * size)
{
- GstVideoFormat format;
- gint width, height;
+ GstVideoInfo info;
- if (!gst_video_format_parse_caps (caps, &format, &width, &height))
+ if (!gst_video_info_from_caps (&info, caps))
return FALSE;
- *size = gst_video_format_get_size (format, width, height);
+ *size = info.size;
- GST_DEBUG_OBJECT (btrans, "unit size = %d for format %d w %d height %d",
- *size, format, width, height);
+ GST_DEBUG_OBJECT (btrans, "unit size = %d for format %s w %d height %d",
+ *size, GST_VIDEO_INFO_NAME (&info), GST_VIDEO_INFO_WIDTH (&info),
+ GST_VIDEO_INFO_HEIGHT (&info));
return TRUE;
}
static GstCaps *
gst_alpha_transform_caps (GstBaseTransform * btrans,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
GstAlpha *alpha = GST_ALPHA (btrans);
GstCaps *ret, *tmp, *tmp2;
structure = gst_structure_copy (gst_caps_get_structure (caps, i));
gst_structure_remove_field (structure, "format");
- gst_structure_remove_field (structure, "endianness");
- gst_structure_remove_field (structure, "depth");
- gst_structure_remove_field (structure, "bpp");
- gst_structure_remove_field (structure, "red_mask");
- gst_structure_remove_field (structure, "green_mask");
- gst_structure_remove_field (structure, "blue_mask");
- gst_structure_remove_field (structure, "alpha_mask");
gst_structure_remove_field (structure, "color-matrix");
gst_structure_remove_field (structure, "chroma-site");
- gst_structure_set_name (structure, "video/x-raw-yuv");
- gst_caps_append_structure (tmp, gst_structure_copy (structure));
- gst_structure_set_name (structure, "video/x-raw-rgb");
gst_caps_append_structure (tmp, structure);
}
GST_DEBUG_OBJECT (alpha,
"Transformed %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, caps, ret);
+ if (filter) {
+ GstCaps *intersection;
+
+ GST_DEBUG_OBJECT (alpha, "Using filter caps %" GST_PTR_FORMAT, filter);
+ intersection =
+ gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (ret);
+ ret = intersection;
+ GST_DEBUG_OBJECT (alpha, "Intersection %" GST_PTR_FORMAT, ret);
+ }
+
+
GST_ALPHA_UNLOCK (alpha);
return ret;
GstCaps * incaps, GstCaps * outcaps)
{
GstAlpha *alpha = GST_ALPHA (btrans);
- const gchar *matrix;
+ GstVideoInfo in_info, out_info;
gboolean passthrough;
- GST_ALPHA_LOCK (alpha);
-
- if (!gst_video_format_parse_caps (incaps, &alpha->in_format,
- &alpha->width, &alpha->height) ||
- !gst_video_format_parse_caps (outcaps, &alpha->out_format,
- &alpha->width, &alpha->height)) {
- GST_WARNING_OBJECT (alpha,
- "Failed to parse caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps,
- outcaps);
- GST_ALPHA_UNLOCK (alpha);
- return FALSE;
- }
+ if (!gst_video_info_from_caps (&in_info, incaps) ||
+ !gst_video_info_from_caps (&out_info, outcaps))
+ goto invalid_format;
- matrix = gst_video_parse_caps_color_matrix (incaps);
- alpha->in_sdtv = matrix ? g_str_equal (matrix, "sdtv") : TRUE;
+ GST_ALPHA_LOCK (alpha);
- matrix = gst_video_parse_caps_color_matrix (outcaps);
- alpha->out_sdtv = matrix ? g_str_equal (matrix, "sdtv") : TRUE;
+ alpha->in_sdtv =
+ in_info.color_matrix ? g_str_equal (in_info.color_matrix, "sdtv") : TRUE;
+ alpha->out_sdtv =
+ out_info.color_matrix ? g_str_equal (out_info.color_matrix,
+ "sdtv") : TRUE;
passthrough = alpha->prefer_passthrough &&
- alpha->in_format == alpha->out_format && alpha->in_sdtv == alpha->out_sdtv
- && alpha->method == ALPHA_METHOD_SET && alpha->alpha == 1.0;
+ GST_VIDEO_INFO_FORMAT (&in_info) == GST_VIDEO_INFO_FORMAT (&out_info)
+ && alpha->in_sdtv == alpha->out_sdtv && alpha->method == ALPHA_METHOD_SET
+ && alpha->alpha == 1.0;
GST_DEBUG_OBJECT (alpha,
"Setting caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT
" (passthrough: %d)", incaps, outcaps, passthrough);
gst_base_transform_set_passthrough (btrans, passthrough);
- if (!gst_alpha_set_process_function (alpha) && !passthrough) {
- GST_WARNING_OBJECT (alpha,
- "No processing function for this caps and no passthrough mode");
- GST_ALPHA_UNLOCK (alpha);
- return FALSE;
- }
+ alpha->in_info = in_info;
+ alpha->out_info = out_info;
+
+ if (!gst_alpha_set_process_function (alpha) && !passthrough)
+ goto no_process;
gst_alpha_init_params (alpha);
GST_ALPHA_UNLOCK (alpha);
return TRUE;
+
+ /* ERRORS */
+invalid_format:
+ {
+ GST_WARNING_OBJECT (alpha,
+ "Failed to parse caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps,
+ outcaps);
+ return FALSE;
+ }
+no_process:
+ {
+ GST_WARNING_OBJECT (alpha,
+ "No processing function for this caps and no passthrough mode");
+ GST_ALPHA_UNLOCK (alpha);
+ return FALSE;
+ }
}
/* based on http://www.cs.utah.edu/~michael/chroma/
#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
static void
-gst_alpha_set_argb_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_argb_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint matrix[12];
gint y, u, v;
gint o[4];
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 3, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
memcpy (matrix,
alpha->out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
}
static void
-gst_alpha_chroma_key_argb_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_argb_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
gint matrix[12];
gint o[4];
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 3, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_argb_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_argb_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
gint i, j;
gint p[4], o[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
-
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 3, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
}
static void
-gst_alpha_chroma_key_argb_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_argb_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
gint matrix[12], matrix2[12];
gint p[4], o[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
-
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 3, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 3);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_ayuv_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_ayuv_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
gint y, x;
gint matrix[12];
gint r, g, b;
gint p[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
memcpy (matrix,
alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
}
static void
-gst_alpha_chroma_key_ayuv_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_ayuv_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
gint matrix[12];
gint p[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_ayuv_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_ayuv_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
gint y, x;
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
if (alpha->in_sdtv == alpha->out_sdtv) {
for (y = 0; y < height; y++) {
for (x = 0; x < width; x++) {
}
static void
-gst_alpha_chroma_key_ayuv_ayuv (const guint8 * src, guint8 * dest,
- gint width, gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_ayuv_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint smin, smax;
guint8 kfgy_scale = alpha->kfgy_scale;
guint noise_level2 = alpha->noise_level2;
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_rgb_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_rgb_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint i, j;
gint matrix[12];
gint o[3];
gint bpp;
- bpp = gst_video_format_get_pixel_stride (alpha->in_format, 0);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
memcpy (matrix,
alpha->out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
}
static void
-gst_alpha_chroma_key_rgb_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_rgb_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
gint o[3];
gint bpp;
- bpp = gst_video_format_get_pixel_stride (alpha->in_format, 0);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_rgb_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_rgb_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint i, j;
gint p[4], o[3];
gint bpp;
- bpp = gst_video_format_get_pixel_stride (alpha->in_format, 0);
-
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
}
static void
-gst_alpha_chroma_key_rgb_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_rgb_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
gint p[4], o[3];
gint bpp;
- bpp = gst_video_format_get_pixel_stride (alpha->in_format, 0);
-
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[2] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_planar_yuv_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_planar_yuv_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
const guint8 *srcY, *srcY_tmp;
const guint8 *srcU, *srcU_tmp;
gint y_stride, uv_stride;
gint v_subs, h_subs;
- y_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
- uv_stride = gst_video_format_get_row_stride (alpha->in_format, 1, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+ uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
srcY_tmp = srcY = src;
- srcU_tmp = srcU =
- src + gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- srcV_tmp = srcV =
- src + gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- switch (alpha->in_format) {
+ srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
v_subs = h_subs = 2;
}
static void
-gst_alpha_chroma_key_planar_yuv_ayuv (const guint8 * src, guint8 * dest,
- gint width, gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_planar_yuv_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
const guint8 *srcY, *srcY_tmp;
const guint8 *srcU, *srcU_tmp;
guint8 kfgy_scale = alpha->kfgy_scale;
guint noise_level2 = alpha->noise_level2;
- y_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
- uv_stride = gst_video_format_get_row_stride (alpha->in_format, 1, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+ uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
srcY_tmp = srcY = src;
- srcU_tmp = srcU =
- src + gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- srcV_tmp = srcV =
- src + gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- switch (alpha->in_format) {
+ srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
v_subs = h_subs = 2;
}
static void
-gst_alpha_set_planar_yuv_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_planar_yuv_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
const guint8 *srcY, *srcY_tmp;
const guint8 *srcU, *srcU_tmp;
gint r, g, b;
gint p[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
-
- y_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
- uv_stride = gst_video_format_get_row_stride (alpha->in_format, 1, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+
+ y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+ uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
srcY_tmp = srcY = src;
- srcU_tmp = srcU =
- src + gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- srcV_tmp = srcV =
- src + gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- switch (alpha->in_format) {
+ srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
v_subs = h_subs = 2;
}
static void
-gst_alpha_chroma_key_planar_yuv_argb (const guint8 * src, guint8 * dest,
- gint width, gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_planar_yuv_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
const guint8 *srcY, *srcY_tmp;
const guint8 *srcU, *srcU_tmp;
gint matrix[12];
gint p[4];
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
-
- y_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
- uv_stride = gst_video_format_get_row_stride (alpha->in_format, 1, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
+
+ y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+ uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
srcY_tmp = srcY = src;
- srcU_tmp = srcU =
- src + gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- srcV_tmp = srcV =
- src + gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- switch (alpha->in_format) {
+ srcU_tmp = srcU = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ srcV_tmp = srcV = src + GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
v_subs = h_subs = 2;
}
static void
-gst_alpha_set_packed_422_ayuv (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_packed_422_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint i, j;
gint y, u, v;
gint src_stride;
const guint8 *src_tmp;
- src_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
- p[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
p[2] = p[0] + 2;
- p[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
if (alpha->in_sdtv != alpha->out_sdtv) {
gint matrix[12];
}
static void
-gst_alpha_chroma_key_packed_422_ayuv (const guint8 * src, guint8 * dest,
- gint width, gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_packed_422_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint smin, smax;
gint src_stride;
const guint8 *src_tmp;
- src_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
- p[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
p[2] = p[0] + 2;
- p[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
smin = 128 - alpha->black_sensitivity;
smax = 128 + alpha->white_sensitivity;
}
static void
-gst_alpha_set_packed_422_argb (const guint8 * src, guint8 * dest, gint width,
- gint height, GstAlpha * alpha)
+gst_alpha_set_packed_422_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
gint i, j;
gint p[4], o[4];
gint matrix[12];
gint r, g, b;
- src_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
o[2] = o[0] + 2;
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
memcpy (matrix,
alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
}
static void
-gst_alpha_chroma_key_packed_422_argb (const guint8 * src, guint8 * dest,
- gint width, gint height, GstAlpha * alpha)
+gst_alpha_chroma_key_packed_422_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
gint i, j;
gint a, y, u, v;
gint r, g, b;
const guint8 *src_tmp;
gint matrix[12];
- src_stride = gst_video_format_get_row_stride (alpha->in_format, 0, width);
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
- o[0] =
- gst_video_format_get_component_offset (alpha->in_format, 0, width,
- height);
+ o[0] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 0);
o[2] = o[0] + 2;
- o[1] =
- gst_video_format_get_component_offset (alpha->in_format, 1, width,
- height);
- o[3] =
- gst_video_format_get_component_offset (alpha->in_format, 2, width,
- height);
-
- p[0] =
- gst_video_format_get_component_offset (alpha->out_format, 3, width,
- height);
- p[1] =
- gst_video_format_get_component_offset (alpha->out_format, 0, width,
- height);
- p[2] =
- gst_video_format_get_component_offset (alpha->out_format, 1, width,
- height);
- p[3] =
- gst_video_format_get_component_offset (alpha->out_format, 2, width,
- height);
+ o[1] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 1);
+ o[3] = GST_VIDEO_FRAME_COMP_OFFSET (in_frame, 2);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (out_frame, 2);
memcpy (matrix,
alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
gfloat tmp;
gfloat tmp1, tmp2;
gfloat y;
+ const GstVideoFormatInfo *in_info, *out_info;
const gint *matrix;
+ in_info = alpha->in_info.finfo;
+ out_info = alpha->out_info.finfo;
+
/* RGB->RGB: convert to SDTV YUV, chroma keying, convert back
* YUV->RGB: chroma keying, convert to RGB
* RGB->YUV: convert to YUV, chroma keying
* YUV->YUV: convert matrix, chroma keying
*/
- if (gst_video_format_is_rgb (alpha->in_format)
- && gst_video_format_is_rgb (alpha->out_format))
+ if (GST_VIDEO_FORMAT_INFO_IS_RGB (in_info)
+ && GST_VIDEO_FORMAT_INFO_IS_RGB (out_info))
matrix = cog_rgb_to_ycbcr_matrix_8bit_sdtv;
- else if (gst_video_format_is_yuv (alpha->in_format)
- && gst_video_format_is_rgb (alpha->out_format))
+ else if (GST_VIDEO_FORMAT_INFO_IS_YUV (in_info)
+ && GST_VIDEO_FORMAT_INFO_IS_RGB (out_info))
matrix =
(alpha->in_sdtv) ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
cog_rgb_to_ycbcr_matrix_8bit_hdtv;
- else if (gst_video_format_is_rgb (alpha->in_format)
- && gst_video_format_is_yuv (alpha->out_format))
+ else if (GST_VIDEO_FORMAT_INFO_IS_RGB (in_info)
+ && GST_VIDEO_FORMAT_INFO_IS_YUV (out_info))
matrix =
(alpha->out_sdtv) ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
cog_rgb_to_ycbcr_matrix_8bit_hdtv;
switch (alpha->method) {
case ALPHA_METHOD_SET:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&alpha->out_info)) {
case GST_VIDEO_FORMAT_AYUV:
- switch (alpha->in_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_set_ayuv_ayuv;
break;
case GST_VIDEO_FORMAT_ABGR:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGRA:
- switch (alpha->in_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_set_ayuv_argb;
break;
case ALPHA_METHOD_GREEN:
case ALPHA_METHOD_BLUE:
case ALPHA_METHOD_CUSTOM:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&alpha->out_info)) {
case GST_VIDEO_FORMAT_AYUV:
- switch (alpha->in_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_chroma_key_ayuv_ayuv;
break;
case GST_VIDEO_FORMAT_ABGR:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGRA:
- switch (alpha->in_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
case GST_VIDEO_FORMAT_AYUV:
alpha->process = gst_alpha_chroma_key_ayuv_argb;
break;
gst_alpha_transform (GstBaseTransform * btrans, GstBuffer * in, GstBuffer * out)
{
GstAlpha *alpha = GST_ALPHA (btrans);
- gint width, height;
+ GstVideoFrame in_frame, out_frame;
GST_ALPHA_LOCK (alpha);
- if (G_UNLIKELY (!alpha->process)) {
- GST_ERROR_OBJECT (alpha, "Not negotiated yet");
- GST_ALPHA_UNLOCK (alpha);
- return GST_FLOW_NOT_NEGOTIATED;
- }
+ if (G_UNLIKELY (!alpha->process))
+ goto not_negotiated;
+
+ if (!gst_video_frame_map (&in_frame, &alpha->in_info, in, GST_MAP_READ))
+ goto invalid_in;
+
+ if (!gst_video_frame_map (&out_frame, &alpha->out_info, out, GST_MAP_WRITE))
+ goto invalid_out;
- width = alpha->width;
- height = alpha->height;
+ alpha->process (&in_frame, &out_frame, alpha);
- alpha->process (GST_BUFFER_DATA (in),
- GST_BUFFER_DATA (out), width, height, alpha);
+ gst_video_frame_unmap (&out_frame);
+ gst_video_frame_unmap (&in_frame);
GST_ALPHA_UNLOCK (alpha);
return GST_FLOW_OK;
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ERROR_OBJECT (alpha, "Not negotiated yet");
+ GST_ALPHA_UNLOCK (alpha);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+invalid_in:
+ {
+ GST_ERROR_OBJECT (alpha, "Invalid input frame");
+ GST_ALPHA_UNLOCK (alpha);
+ return GST_FLOW_OK;
+ }
+invalid_out:
+ {
+ GST_ERROR_OBJECT (alpha, "Invalid output frame");
+ gst_video_frame_unmap (&in_frame);
+ GST_ALPHA_UNLOCK (alpha);
+ return GST_FLOW_OK;
+ }
}
static gboolean
/* caps */
GStaticMutex lock;
- GstVideoFormat in_format, out_format;
- gint width, height;
+ GstVideoInfo in_info, out_info;
gboolean in_sdtv, out_sdtv;
/* properties */
gboolean prefer_passthrough;
/* processing function */
- void (*process) (const guint8 *src, guint8 *dest, gint width, gint height, GstAlpha *alpha);
+ void (*process) (const GstVideoFrame *in_frame, GstVideoFrame *out_frame, GstAlpha *alpha);
/* precalculated values for chroma keying */
gint8 cb, cr;
*
* Sample pipeline:
* |[
- * gst-launch videotestsrc ! "video/x-raw-yuv,format=(fourcc)AYUV" ! \
- * alphacolor ! "video/x-raw-rgb" ! ffmpegcolorspace ! autovideosink
+ * gst-launch videotestsrc ! "video/x-raw,format=(string)AYUV" ! \
+ * alphacolor ! videoconvert ! autovideosink
* ]|
*/
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_ABGR ";"
- GST_VIDEO_CAPS_YUV ("AYUV"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBA, BGRA, ARGB, ABGR, AYUV }"))
);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_ABGR ";"
- GST_VIDEO_CAPS_YUV ("AYUV"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBA, BGRA, ARGB, ABGR, AYUV }"))
);
-GST_BOILERPLATE (GstAlphaColor, gst_alpha_color, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+G_DEFINE_TYPE (GstAlphaColor, gst_alpha_color, GST_TYPE_VIDEO_FILTER);
static GstCaps *gst_alpha_color_transform_caps (GstBaseTransform * btrans,
- GstPadDirection direction, GstCaps * caps);
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static gboolean gst_alpha_color_set_caps (GstBaseTransform * btrans,
GstCaps * incaps, GstCaps * outcaps);
static GstFlowReturn gst_alpha_color_transform_ip (GstBaseTransform * btrans,
GstBuffer * inbuf);
static void
-gst_alpha_color_base_init (gpointer g_class)
+gst_alpha_color_class_init (GstAlphaColorClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *gstbasetransform_class =
+ (GstBaseTransformClass *) klass;
- gst_element_class_set_details_simple (element_class, "Alpha color filter",
+ GST_DEBUG_CATEGORY_INIT (alpha_color_debug, "alphacolor", 0,
+ "ARGB<->AYUV colorspace conversion preserving the alpha channels");
+
+ gst_element_class_set_details_simple (gstelement_class, "Alpha color filter",
"Filter/Converter/Video",
"ARGB from/to AYUV colorspace conversion preserving the alpha channel",
- "Wim Taymans <wim@fluendo.com>");
+ "Wim Taymans <wim.taymans@gmail.com>");
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&src_template));
-}
-
-static void
-gst_alpha_color_class_init (GstAlphaColorClass * klass)
-{
- GstBaseTransformClass *gstbasetransform_class =
- (GstBaseTransformClass *) klass;
gstbasetransform_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_alpha_color_transform_caps);
GST_DEBUG_FUNCPTR (gst_alpha_color_set_caps);
gstbasetransform_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_alpha_color_transform_ip);
-
- GST_DEBUG_CATEGORY_INIT (alpha_color_debug, "alphacolor", 0,
- "ARGB<->AYUV colorspace conversion preserving the alpha channels");
}
static void
-gst_alpha_color_init (GstAlphaColor * alpha, GstAlphaColorClass * g_class)
+gst_alpha_color_init (GstAlphaColor * alpha)
{
GstBaseTransform *btrans = GST_BASE_TRANSFORM (alpha);
static GstCaps *
gst_alpha_color_transform_caps (GstBaseTransform * btrans,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
const GstCaps *tmpl_caps = NULL;
GstCaps *result = NULL, *local_caps = NULL;
GST_LOG_OBJECT (btrans, "transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
caps, result);
+ if (filter) {
+ GstCaps *intersection;
+
+ GST_DEBUG_OBJECT (btrans, "Using filter caps %" GST_PTR_FORMAT, filter);
+ intersection =
+ gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (result);
+ result = intersection;
+ GST_DEBUG_OBJECT (btrans, "Intersection %" GST_PTR_FORMAT, result);
+ }
+
+
return result;
}
#define DEFINE_ARGB_AYUV_FUNCTIONS(name, A, R, G, B) \
static void \
-transform_##name##_ayuv (guint8 * data, gint size, const gint *matrix) \
+transform_##name##_ayuv (GstVideoFrame * frame, const gint *matrix) \
{ \
+ guint8 *data; \
+ gsize size; \
gint y, u, v; \
gint yc[4]; \
gint uc[4]; \
gint vc[4]; \
\
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);\
+ size = GST_VIDEO_FRAME_SIZE (frame);\
+ \
memcpy (yc, matrix, 4 * sizeof (gint)); \
memcpy (uc, matrix + 4, 4 * sizeof (gint)); \
memcpy (vc, matrix + 8, 4 * sizeof (gint)); \
} \
\
static void \
-transform_ayuv_##name (guint8 * data, gint size, const gint *matrix) \
+transform_ayuv_##name (GstVideoFrame * frame, const gint *matrix) \
{ \
+ guint8 *data; \
+ gsize size; \
gint r, g, b; \
gint rc[4]; \
gint gc[4]; \
gint bc[4]; \
\
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);\
+ size = GST_VIDEO_FRAME_SIZE (frame);\
+ \
memcpy (rc, matrix, 4 * sizeof (gint)); \
memcpy (gc, matrix + 4, 4 * sizeof (gint)); \
memcpy (bc, matrix + 8, 4 * sizeof (gint)); \
DEFINE_ARGB_AYUV_FUNCTIONS (abgr, 0, 3, 2, 1);
static void
-transform_ayuv_ayuv (guint8 * data, gint size, const gint * matrix)
+transform_ayuv_ayuv (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint y, u, v;
gint yc[4];
gint uc[4];
if (matrix == NULL)
return;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
memcpy (yc, matrix, 4 * sizeof (gint));
memcpy (uc, matrix + 4, 4 * sizeof (gint));
memcpy (vc, matrix + 8, 4 * sizeof (gint));
}
static void
-transform_argb_bgra (guint8 * data, gint size, const gint * matrix)
+transform_argb_bgra (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[1];
g = data[2];
#define transform_abgr_rgba transform_argb_bgra
static void
-transform_argb_abgr (guint8 * data, gint size, const gint * matrix)
+transform_argb_abgr (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[1];
g = data[2];
#define transform_abgr_argb transform_argb_abgr
static void
-transform_rgba_bgra (guint8 * data, gint size, const gint * matrix)
+transform_rgba_bgra (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[0];
g = data[1];
#define transform_bgra_rgba transform_rgba_bgra
static void
-transform_argb_rgba (guint8 * data, gint size, const gint * matrix)
+transform_argb_rgba (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[1];
g = data[2];
#define transform_abgr_bgra transform_argb_rgba
static void
-transform_bgra_argb (guint8 * data, gint size, const gint * matrix)
+transform_bgra_argb (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[2];
g = data[1];
#define transform_rgba_abgr transform_bgra_argb
static void
-transform_rgba_argb (guint8 * data, gint size, const gint * matrix)
+transform_rgba_argb (GstVideoFrame * frame, const gint * matrix)
{
+ guint8 *data;
+ gsize size;
gint r, g, b;
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
while (size > 0) {
r = data[0];
g = data[1];
{
GstAlphaColor *alpha = GST_ALPHA_COLOR (btrans);
gboolean ret;
- gint w, h;
- gint w2, h2;
- GstVideoFormat in_format, out_format;
- const gchar *matrix;
+ GstVideoInfo in_info, out_info;
gboolean in_sdtv, out_sdtv;
alpha->process = NULL;
alpha->matrix = NULL;
- ret = gst_video_format_parse_caps (incaps, &in_format, &w, &h);
- ret &= gst_video_format_parse_caps (outcaps, &out_format, &w2, &h2);
+ ret = gst_video_info_from_caps (&in_info, incaps);
+ ret &= gst_video_info_from_caps (&out_info, outcaps);
+ if (!ret)
+ goto invalid_caps;
- if (!ret || w != w2 || h != h2) {
- GST_DEBUG_OBJECT (alpha, "incomplete or invalid caps!");
- return FALSE;
- }
-
- matrix = gst_video_parse_caps_color_matrix (incaps);
- in_sdtv = matrix ? g_str_equal (matrix, "sdtv") : TRUE;
- matrix = gst_video_parse_caps_color_matrix (outcaps);
- out_sdtv = matrix ? g_str_equal (matrix, "sdtv") : TRUE;
+ if (GST_VIDEO_INFO_WIDTH (&in_info) != GST_VIDEO_INFO_WIDTH (&out_info) ||
+ GST_VIDEO_INFO_HEIGHT (&in_info) != GST_VIDEO_INFO_HEIGHT (&out_info))
+ goto invalid_caps;
- alpha->in_format = in_format;
- alpha->out_format = out_format;
- alpha->width = w;
- alpha->height = h;
+ in_sdtv =
+ in_info.color_matrix ? g_str_equal (in_info.color_matrix, "sdtv") : TRUE;
+ out_sdtv =
+ out_info.color_matrix ? g_str_equal (out_info.color_matrix,
+ "sdtv") : TRUE;
- switch (alpha->in_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&in_info)) {
case GST_VIDEO_FORMAT_ARGB:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&out_info)) {
case GST_VIDEO_FORMAT_ARGB:
alpha->process = NULL;
alpha->matrix = NULL;
}
break;
case GST_VIDEO_FORMAT_BGRA:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&out_info)) {
case GST_VIDEO_FORMAT_BGRA:
alpha->process = NULL;
alpha->matrix = NULL;
}
break;
case GST_VIDEO_FORMAT_ABGR:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&out_info)) {
case GST_VIDEO_FORMAT_ABGR:
alpha->process = NULL;
alpha->matrix = NULL;
}
break;
case GST_VIDEO_FORMAT_RGBA:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&out_info)) {
case GST_VIDEO_FORMAT_RGBA:
alpha->process = NULL;
alpha->matrix = NULL;
}
break;
case GST_VIDEO_FORMAT_AYUV:
- switch (alpha->out_format) {
+ switch (GST_VIDEO_INFO_FORMAT (&out_info)) {
case GST_VIDEO_FORMAT_AYUV:
if (in_sdtv == out_sdtv) {
alpha->process = transform_ayuv_ayuv;
break;
}
- if (in_format == out_format && in_sdtv == out_sdtv)
+ if (GST_VIDEO_INFO_FORMAT (&in_info) == GST_VIDEO_INFO_FORMAT (&out_info)
+ && in_sdtv == out_sdtv)
gst_base_transform_set_passthrough (btrans, TRUE);
else if (!alpha->process)
- return FALSE;
+ goto no_process;
return TRUE;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_DEBUG_OBJECT (alpha, "incomplete or invalid caps");
+ return FALSE;
+ }
+no_process:
+ {
+ GST_DEBUG_OBJECT (alpha, "could not find process function");
+ return FALSE;
+ }
}
static GstFlowReturn
gst_alpha_color_transform_ip (GstBaseTransform * btrans, GstBuffer * inbuf)
{
GstAlphaColor *alpha = GST_ALPHA_COLOR (btrans);
-
- if (G_UNLIKELY (GST_BUFFER_SIZE (inbuf) != 4 * alpha->width * alpha->height)) {
- GST_ERROR_OBJECT (alpha, "Invalid buffer size (was %u, expected %u)",
- GST_BUFFER_SIZE (inbuf), alpha->width * alpha->height);
- return GST_FLOW_ERROR;
- }
+ GstVideoFrame frame;
if (gst_base_transform_is_passthrough (btrans))
return GST_FLOW_OK;
- if (G_UNLIKELY (!alpha->process)) {
- GST_ERROR_OBJECT (alpha, "Not negotiated yet");
- return GST_FLOW_NOT_NEGOTIATED;
- }
+ if (G_UNLIKELY (!alpha->process))
+ goto not_negotiated;
+
+ if (!gst_video_frame_map (&frame, &alpha->in_info, inbuf, GST_MAP_READWRITE))
+ goto invalid_buffer;
/* Transform in place */
- alpha->process (GST_BUFFER_DATA (inbuf), GST_BUFFER_SIZE (inbuf),
- alpha->matrix);
+ alpha->process (&frame, alpha->matrix);
+
+ gst_video_frame_unmap (&frame);
return GST_FLOW_OK;
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ERROR_OBJECT (alpha, "Not negotiated yet");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+invalid_buffer:
+ {
+ GST_ERROR_OBJECT (alpha, "Invalid buffer received");
+ return GST_FLOW_ERROR;
+ }
}
static gboolean
/*< private >*/
/* caps */
- GstVideoFormat in_format, out_format;
- gint width, height;
+ GstVideoInfo in_info, out_info;
- void (*process) (guint8 * data, gint size, const gint * matrix);
+ void (*process) (GstVideoFrame * frame, const gint * matrix);
const gint *matrix;
};
GstBuffer * buffer, gboolean start_tag, guint * tag_size,
GstTagList ** tags);
-GST_BOILERPLATE (GstApeDemux, gst_ape_demux, GstTagDemux, GST_TYPE_TAG_DEMUX);
+G_DEFINE_TYPE (GstApeDemux, gst_ape_demux, GST_TYPE_TAG_DEMUX);
static void
-gst_ape_demux_base_init (gpointer klass)
+gst_ape_demux_class_init (GstApeDemuxClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *element_class;
+ GstTagDemuxClass *tagdemux_class;
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_factory));
+ GST_DEBUG_CATEGORY_INIT (apedemux_debug, "apedemux", 0,
+ "GStreamer APE tag demuxer");
+
+ tagdemux_class = GST_TAG_DEMUX_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_set_details_simple (element_class, "APE tag demuxer",
"Codec/Demuxer/Metadata",
"Read and output APE tags while demuxing the contents",
"Tim-Philipp Müller <tim centricular net>");
- GST_DEBUG_CATEGORY_INIT (apedemux_debug, "apedemux", 0,
- "GStreamer APE tag demuxer");
-}
-
-static void
-gst_ape_demux_class_init (GstApeDemuxClass * klass)
-{
- GstTagDemuxClass *tagdemux_class;
-
- tagdemux_class = GST_TAG_DEMUX_CLASS (klass);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
tagdemux_class->identify_tag = GST_DEBUG_FUNCPTR (gst_ape_demux_identify_tag);
tagdemux_class->parse_tag = GST_DEBUG_FUNCPTR (gst_ape_demux_parse_tag);
}
static void
-gst_ape_demux_init (GstApeDemux * apedemux, GstApeDemuxClass * gclass)
+gst_ape_demux_init (GstApeDemux * apedemux)
{
/* nothing to do here */
}
gst_ape_demux_identify_tag (GstTagDemux * demux, GstBuffer * buffer,
gboolean start_tag, guint * tag_size)
{
- if (memcmp (GST_BUFFER_DATA (buffer), "APETAGEX", 8) != 0) {
+ guint8 *data;
+ gsize size;
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
+ if (memcmp (data, "APETAGEX", 8) != 0) {
GST_DEBUG_OBJECT (demux, "No APETAGEX marker at %s - not an APE file",
(start_tag) ? "start" : "end");
+ gst_buffer_unmap (buffer, data, size);
return FALSE;
}
- *tag_size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buffer) + 12);
+ *tag_size = GST_READ_UINT32_LE (data + 12);
/* size is without header, so add 32 to account for that */
*tag_size += 32;
+ gst_buffer_unmap (buffer, data, size);
+
return TRUE;
}
gst_ape_demux_parse_tag (GstTagDemux * demux, GstBuffer * buffer,
gboolean start_tag, guint * tag_size, GstTagList ** tags)
{
- const guint8 *data;
- const guint8 *footer;
+ guint8 *data_start, *data;
+ guint8 *footer;
gboolean have_header;
gboolean end_tag = !start_tag;
GstCaps *sink_caps;
guint version, footer_size;
+ gsize size;
+
+ data_start = data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
- GST_LOG_OBJECT (demux, "Parsing buffer of size %u", GST_BUFFER_SIZE (buffer));
+ GST_LOG_OBJECT (demux, "Parsing buffer of size %" G_GSIZE_FORMAT, size);
- data = GST_BUFFER_DATA (buffer);
- footer = GST_BUFFER_DATA (buffer) + GST_BUFFER_SIZE (buffer) - 32;
+ footer = data + size - 32;
GST_LOG_OBJECT (demux, "Checking for footer at offset 0x%04x",
(guint) (footer - data));
GST_TAG_CONTAINER_FORMAT, sink_caps);
gst_caps_unref (sink_caps);
+ gst_buffer_unmap (buffer, data_start, size);
+
return GST_TAG_DEMUX_RESULT_OK;
}
" rate=(int)[1,MAX]," \
" channels=(int)[1,MAX]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_amplify_debug, "audioamplify", 0, "audioamplify element");
-
-GST_BOILERPLATE_FULL (GstAudioAmplify, gst_audio_amplify, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioAmplify, gst_audio_amplify, GST_TYPE_AUDIO_FILTER);
static gboolean gst_audio_amplify_set_process_function (GstAudioAmplify *
filter, gint clipping, gint format, gint width);
/* GObject vmethod implementations */
static void
-gst_audio_amplify_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Audio amplifier",
- "Filter/Effect/Audio",
- "Amplifies an audio stream by a given factor",
- "Sebastian Dröge <slomo@circular-chaos.org>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_amplify_class_init (GstAudioAmplifyClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_amplify_debug, "audioamplify", 0,
+ "audioamplify element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_amplify_set_property;
gobject_class->get_property = gst_audio_amplify_get_property;
GST_TYPE_AUDIO_AMPLIFY_CLIPPING_METHOD, METHOD_CLIP,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Audio amplifier",
+ "Filter/Effect/Audio",
+ "Amplifies an audio stream by a given factor",
+ "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_amplify_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_amplify_init (GstAudioAmplify * filter, GstAudioAmplifyClass * klass)
+gst_audio_amplify_init (GstAudioAmplify * filter)
{
filter->amplification = 1.0;
gst_audio_amplify_set_process_function (filter, METHOD_CLIP,
GstAudioAmplify *filter = GST_AUDIO_AMPLIFY (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
-
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (filter)->format.width / 8);
+
+ filter->process (filter, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
PROP_POLES
};
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_band_debug, "audiochebband", 0, "audiochebband element");
-
-GST_BOILERPLATE_FULL (GstAudioChebBand, gst_audio_cheb_band,
- GstAudioFXBaseIIRFilter, GST_TYPE_AUDIO_FX_BASE_IIR_FILTER, DEBUG_INIT);
+#define gst_audio_cheb_band_parent_class parent_class
+G_DEFINE_TYPE (GstAudioChebBand, gst_audio_cheb_band,
+ GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
static void gst_audio_cheb_band_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_cheb_band_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (element_class,
- "Band pass & band reject filter", "Filter/Effect/Audio",
- "Chebyshev band pass and band reject filter",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
-static void
gst_audio_cheb_band_class_init (GstAudioChebBandClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_band_debug, "audiochebband", 0,
+ "audiochebband element");
+
gobject_class->set_property = gst_audio_cheb_band_set_property;
gobject_class->get_property = gst_audio_cheb_band_get_property;
gobject_class->finalize = gst_audio_cheb_band_finalize;
4, 32, 4,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Band pass & band reject filter", "Filter/Effect/Audio",
+ "Chebyshev band pass and band reject filter",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_cheb_band_setup);
}
static void
-gst_audio_cheb_band_init (GstAudioChebBand * filter,
- GstAudioChebBandClass * klass)
+gst_audio_cheb_band_init (GstAudioChebBand * filter)
{
filter->lower_frequency = filter->upper_frequency = 0.0;
filter->mode = MODE_BAND_PASS;
PROP_POLES
};
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_limit_debug, "audiocheblimit", 0, "audiocheblimit element");
-
-GST_BOILERPLATE_FULL (GstAudioChebLimit,
- gst_audio_cheb_limit, GstAudioFXBaseIIRFilter,
- GST_TYPE_AUDIO_FX_BASE_IIR_FILTER, DEBUG_INIT);
+#define gst_audio_cheb_limit_parent_class parent_class
+G_DEFINE_TYPE (GstAudioChebLimit,
+ gst_audio_cheb_limit, GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
static void gst_audio_cheb_limit_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_cheb_limit_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (element_class,
- "Low pass & high pass filter",
- "Filter/Effect/Audio",
- "Chebyshev low pass and high pass filter",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
-static void
gst_audio_cheb_limit_class_init (GstAudioChebLimitClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_limit_debug, "audiocheblimit", 0,
+ "audiocheblimit element");
+
gobject_class->set_property = gst_audio_cheb_limit_set_property;
gobject_class->get_property = gst_audio_cheb_limit_get_property;
gobject_class->finalize = gst_audio_cheb_limit_finalize;
2, 32, 4,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Low pass & high pass filter",
+ "Filter/Effect/Audio",
+ "Chebyshev low pass and high pass filter",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_cheb_limit_setup);
}
static void
-gst_audio_cheb_limit_init (GstAudioChebLimit * filter,
- GstAudioChebLimitClass * klass)
+gst_audio_cheb_limit_init (GstAudioChebLimit * filter)
{
filter->cutoff = 0.0;
filter->mode = MODE_LOW_PASS;
" rate=(int)[1,MAX]," \
" channels=(int)[1,MAX]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_dynamic_debug, "audiodynamic", 0, "audiodynamic element");
-
-GST_BOILERPLATE_FULL (GstAudioDynamic, gst_audio_dynamic, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioDynamic, gst_audio_dynamic, GST_TYPE_AUDIO_FILTER);
static void gst_audio_dynamic_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_dynamic_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class,
- "Dynamic range controller", "Filter/Effect/Audio",
- "Compressor and Expander", "Sebastian Dröge <slomo@circular-chaos.org>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_dynamic_class_init (GstAudioDynamicClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_dynamic_debug, "audiodynamic", 0,
+ "audiodynamic element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_dynamic_set_property;
gobject_class->get_property = gst_audio_dynamic_get_property;
1.0,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Dynamic range controller", "Filter/Effect/Audio",
+ "Compressor and Expander", "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_dynamic_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_dynamic_init (GstAudioDynamic * filter, GstAudioDynamicClass * klass)
+gst_audio_dynamic_init (GstAudioDynamic * filter)
{
filter->ratio = 1.0;
filter->threshold = 0.0;
GstAudioDynamic *filter = GST_AUDIO_DYNAMIC (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
-
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (filter)->format.width / 8);
+
+ filter->process (filter, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
" rate=(int)[1,MAX]," \
" channels=(int)[1,MAX]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_echo_debug, "audioecho", 0, "audioecho element");
-
-GST_BOILERPLATE_FULL (GstAudioEcho, gst_audio_echo, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+#define gst_audio_echo_parent_class parent_class
+G_DEFINE_TYPE (GstAudioEcho, gst_audio_echo, GST_TYPE_AUDIO_FILTER);
static void gst_audio_echo_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_echo_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Audio echo",
- "Filter/Effect/Audio",
- "Adds an echo or reverb effect to an audio stream",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_echo_class_init (GstAudioEchoClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *basetransform_class = (GstBaseTransformClass *) klass;
GstAudioFilterClass *audioself_class = (GstAudioFilterClass *) klass;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_echo_debug, "audioecho", 0,
+ "audioecho element");
gobject_class->set_property = gst_audio_echo_set_property;
gobject_class->get_property = gst_audio_echo_get_property;
0.0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS
| GST_PARAM_CONTROLLABLE));
+ gst_element_class_set_details_simple (gstelement_class, "Audio echo",
+ "Filter/Effect/Audio",
+ "Adds an echo or reverb effect to an audio stream",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
audioself_class->setup = GST_DEBUG_FUNCPTR (gst_audio_echo_setup);
basetransform_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_audio_echo_transform_ip);
}
static void
-gst_audio_echo_init (GstAudioEcho * self, GstAudioEchoClass * klass)
+gst_audio_echo_init (GstAudioEcho * self)
{
self->delay = 1;
self->max_delay = 1;
GstAudioEcho *self = GST_AUDIO_ECHO (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (self), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (self)->format.width / 8);
-
if (self->buffer == NULL) {
guint width, rate, channels;
}
}
- self->process (self, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (self)->format.width / 8);
+
+ self->process (self, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
static guint gst_audio_fir_filter_signals[LAST_SIGNAL] = { 0, };
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_fir_filter_debug, "audiofirfilter", 0, \
- "Generic audio FIR filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioFIRFilter, gst_audio_fir_filter, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_FIR_FILTER, DEBUG_INIT);
+#define gst_audio_fir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFIRFilter, gst_audio_fir_filter,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
static void gst_audio_fir_filter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static gboolean gst_audio_fir_filter_setup (GstAudioFilter * base,
GstRingBufferSpec * format);
-/* Element class */
-static void
-gst_audio_fir_filter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Audio FIR filter", "Filter/Effect/Audio",
- "Generic audio FIR filter with custom filter kernel",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
static void
gst_audio_fir_filter_class_init (GstAudioFIRFilterClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_fir_filter_debug, "audiofirfilter", 0,
+ "Generic audio FIR filter plugin");
+
gobject_class->set_property = gst_audio_fir_filter_set_property;
gobject_class->get_property = gst_audio_fir_filter_get_property;
gobject_class->finalize = gst_audio_fir_filter_finalize;
g_signal_new ("rate-changed", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstAudioFIRFilterClass, rate_changed),
NULL, NULL, gst_marshal_VOID__INT, G_TYPE_NONE, 1, G_TYPE_INT);
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "Audio FIR filter", "Filter/Effect/Audio",
+ "Generic audio FIR filter with custom filter kernel",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
static void
}
static void
-gst_audio_fir_filter_init (GstAudioFIRFilter * self,
- GstAudioFIRFilterClass * g_class)
+gst_audio_fir_filter_init (GstAudioFIRFilter * self)
{
GValue v = { 0, };
GValueArray *va;
" rate = (int) [ 1, MAX ], " \
" channels = (int) [ 1, MAX ]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_fir_filter_debug, "audiofxbasefirfilter", 0, \
- "FIR filter base class");
-
/* Switch from time-domain to FFT convolution for kernels >= this */
#define FFT_THRESHOLD 32
#define DEFAULT_LOW_LATENCY FALSE
#define DEFAULT_DRAIN_ON_CHANGES TRUE
-GST_BOILERPLATE_FULL (GstAudioFXBaseFIRFilter, gst_audio_fx_base_fir_filter,
- GstAudioFilter, GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+#define gst_audio_fx_base_fir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFXBaseFIRFilter, gst_audio_fx_base_fir_filter,
+ GST_TYPE_AUDIO_FILTER);
static GstFlowReturn gst_audio_fx_base_fir_filter_transform (GstBaseTransform *
base, GstBuffer * inbuf, GstBuffer * outbuf);
static gboolean gst_audio_fx_base_fir_filter_event (GstBaseTransform * base,
GstEvent * event);
static gboolean gst_audio_fx_base_fir_filter_transform_size (GstBaseTransform *
- base, GstPadDirection direction, GstCaps * caps, guint size,
- GstCaps * othercaps, guint * othersize);
+ base, GstPadDirection direction, GstCaps * caps, gsize size,
+ GstCaps * othercaps, gsize * othersize);
static gboolean gst_audio_fx_base_fir_filter_setup (GstAudioFilter * base,
GstRingBufferSpec * format);
}
static void
-gst_audio_fx_base_fir_filter_base_init (gpointer g_class)
-{
- GstCaps *caps;
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (g_class),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_fx_base_fir_filter_class_init (GstAudioFXBaseFIRFilterClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_fir_filter_debug,
+ "audiofxbasefirfilter", 0, "FIR filter base class");
gobject_class->dispose = gst_audio_fx_base_fir_filter_dispose;
gobject_class->set_property = gst_audio_fx_base_fir_filter_set_property;
DEFAULT_DRAIN_ON_CHANGES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
trans_class->transform =
GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_transform);
trans_class->start = GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_start);
}
static void
-gst_audio_fx_base_fir_filter_init (GstAudioFXBaseFIRFilter * self,
- GstAudioFXBaseFIRFilterClass * g_class)
+gst_audio_fx_base_fir_filter_init (GstAudioFXBaseFIRFilter * self)
{
self->kernel = NULL;
self->buffer = NULL;
gint channels = GST_AUDIO_FILTER_CAST (self)->format.channels;
gint width = GST_AUDIO_FILTER_CAST (self)->format.width / 8;
gint outsize, outsamples;
- guint8 *in, *out;
+ guint8 *in, *out, *data;
+ gsize size;
if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
self->buffer_fill = 0;
g_free (out);
}
- res = gst_pad_alloc_buffer (GST_BASE_TRANSFORM_CAST (self)->srcpad,
- GST_BUFFER_OFFSET_NONE, outsize,
- GST_PAD_CAPS (GST_BASE_TRANSFORM_CAST (self)->srcpad), &outbuf);
-
- if (G_UNLIKELY (res != GST_FLOW_OK)) {
- GST_WARNING_OBJECT (self, "failed allocating buffer of %d bytes",
- outsize);
- self->buffer_fill = 0;
- return;
- }
+ outbuf = gst_buffer_new_and_alloc (outsize);
/* Convolve the residue with zeros to get the actual remaining data */
in = g_new0 (guint8, outsize);
- self->nsamples_out +=
- self->process (self, in, GST_BUFFER_DATA (outbuf), outsamples);
+ data = gst_buffer_map (outbuf, &size, NULL, GST_MAP_READWRITE);
+ self->nsamples_out += self->process (self, in, data, outsamples);
+ gst_buffer_unmap (outbuf, data, size);
+
g_free (in);
} else {
guint gensamples = 0;
- guint8 *data;
outbuf = gst_buffer_new_and_alloc (outsize);
- data = GST_BUFFER_DATA (outbuf);
+ data = gst_buffer_map (outbuf, &size, NULL, GST_MAP_READWRITE);
while (gensamples < outsamples) {
guint step_insamples = self->block_length - self->buffer_fill;
g_free (out);
}
self->nsamples_out += gensamples;
+
+ gst_buffer_unmap (outbuf, data, size);
}
/* Set timestamp, offset, etc from the values we
GST_DEBUG_OBJECT (self, "Pushing residue buffer of size %d with timestamp: %"
GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
- GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
GST_BUFFER_OFFSET_END (outbuf), outsamples);
static gboolean
gst_audio_fx_base_fir_filter_transform_size (GstBaseTransform * base,
- GstPadDirection direction, GstCaps * caps, guint size, GstCaps * othercaps,
- guint * othersize)
+ GstPadDirection direction, GstCaps * caps, gsize size, GstCaps * othercaps,
+ gsize * othersize)
{
GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
guint blocklen;
gint channels = GST_AUDIO_FILTER_CAST (self)->format.channels;
gint rate = GST_AUDIO_FILTER_CAST (self)->format.rate;
gint width = GST_AUDIO_FILTER_CAST (self)->format.width / 8;
- guint input_samples = (GST_BUFFER_SIZE (inbuf) / width) / channels;
- guint output_samples = (GST_BUFFER_SIZE (outbuf) / width) / channels;
+ guint8 *indata, *outdata;
+ gsize insize, outsize;
+ guint input_samples;
+ guint output_samples;
guint generated_samples;
guint64 output_offset;
gint64 diff = 0;
self->start_off = GST_BUFFER_OFFSET (inbuf);
}
+ indata = gst_buffer_map (inbuf, &insize, NULL, GST_MAP_READ);
+ outdata = gst_buffer_map (outbuf, &outsize, NULL, GST_MAP_WRITE);
+
+ input_samples = (insize / width) / channels;
+ output_samples = (outsize / width) / channels;
+
self->nsamples_in += input_samples;
- generated_samples =
- self->process (self, GST_BUFFER_DATA (inbuf), GST_BUFFER_DATA (outbuf),
- input_samples);
+ generated_samples = self->process (self, indata, outdata, input_samples);
+
+ gst_buffer_unmap (inbuf, indata, insize);
+ gst_buffer_unmap (outbuf, outdata, outsize);
g_assert (generated_samples <= output_samples);
self->nsamples_out += generated_samples;
gint64 tmp = diff;
diff = generated_samples - diff;
generated_samples = tmp;
- GST_BUFFER_DATA (outbuf) += diff * width * channels;
}
- GST_BUFFER_SIZE (outbuf) = generated_samples * width * channels;
+ gst_buffer_resize (outbuf, diff * width * channels,
+ generated_samples * width * channels);
output_offset = self->nsamples_out - self->latency - generated_samples;
GST_BUFFER_TIMESTAMP (outbuf) =
GST_DEBUG_OBJECT (self, "Pushing buffer of size %d with timestamp: %"
GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
- GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
GST_BUFFER_OFFSET_END (outbuf), generated_samples);
" rate = (int) [ 1, MAX ]," \
" channels = (int) [ 1, MAX ]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_iir_filter_debug, "audiofxbaseiirfilter", 0, "Audio IIR Filter Base Class");
-
-GST_BOILERPLATE_FULL (GstAudioFXBaseIIRFilter,
- gst_audio_fx_base_iir_filter, GstAudioFilter, GST_TYPE_AUDIO_FILTER,
- DEBUG_INIT);
+#define gst_audio_fx_base_iir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFXBaseIIRFilter,
+ gst_audio_fx_base_iir_filter, GST_TYPE_AUDIO_FILTER);
static gboolean gst_audio_fx_base_iir_filter_setup (GstAudioFilter * filter,
GstRingBufferSpec * format);
/* GObject vmethod implementations */
static void
-gst_audio_fx_base_iir_filter_base_init (gpointer klass)
-{
- GstCaps *caps;
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_fx_base_iir_filter_dispose (GObject * object)
{
GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (object);
GObjectClass *gobject_class = (GObjectClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_iir_filter_debug,
+ "audiofxbaseiirfilter", 0, "Audio IIR Filter Base Class");
gobject_class->dispose = gst_audio_fx_base_iir_filter_dispose;
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_fx_base_iir_filter_setup);
trans_class->transform_ip =
}
static void
-gst_audio_fx_base_iir_filter_init (GstAudioFXBaseIIRFilter * filter,
- GstAudioFXBaseIIRFilterClass * klass)
+gst_audio_fx_base_iir_filter_init (GstAudioFXBaseIIRFilter * filter)
{
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
-
if (gst_base_transform_is_passthrough (base))
return GST_FLOW_OK;
g_return_val_if_fail (filter->a != NULL, GST_FLOW_ERROR);
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (filter)->format.width / 8);
+
+ filter->process (filter, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
static guint gst_audio_iir_filter_signals[LAST_SIGNAL] = { 0, };
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_iir_filter_debug, "audioiirfilter", 0, \
- "Generic audio IIR filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioIIRFilter, gst_audio_iir_filter, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_IIR_FILTER, DEBUG_INIT);
+#define gst_audio_iir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioIIRFilter, gst_audio_iir_filter,
+ GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
static void gst_audio_iir_filter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static gboolean gst_audio_iir_filter_setup (GstAudioFilter * base,
GstRingBufferSpec * format);
-/* Element class */
-static void
-gst_audio_iir_filter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Audio IIR filter", "Filter/Effect/Audio",
- "Generic audio IIR filter with custom filter kernel",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
static void
gst_audio_iir_filter_class_init (GstAudioIIRFilterClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_iir_filter_debug, "audioiirfilter", 0,
+ "Generic audio IIR filter plugin");
+
gobject_class->set_property = gst_audio_iir_filter_set_property;
gobject_class->get_property = gst_audio_iir_filter_get_property;
gobject_class->finalize = gst_audio_iir_filter_finalize;
g_signal_new ("rate-changed", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstAudioIIRFilterClass, rate_changed),
NULL, NULL, gst_marshal_VOID__INT, G_TYPE_NONE, 1, G_TYPE_INT);
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "Audio IIR filter", "Filter/Effect/Audio",
+ "Generic audio IIR filter with custom filter kernel",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
static void
}
static void
-gst_audio_iir_filter_init (GstAudioIIRFilter * self,
- GstAudioIIRFilterClass * g_class)
+gst_audio_iir_filter_init (GstAudioIIRFilter * self)
{
GValue v = { 0, };
GValueArray *a, *b;
" rate=(int)[1,MAX]," \
" channels=(int)[1,MAX]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_invert_debug, "audioinvert", 0, "audioinvert element");
-
-GST_BOILERPLATE_FULL (GstAudioInvert, gst_audio_invert, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioInvert, gst_audio_invert, GST_TYPE_AUDIO_FILTER);
static void gst_audio_invert_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_invert_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Audio inversion",
- "Filter/Effect/Audio",
- "Swaps upper and lower half of audio samples",
- "Sebastian Dröge <slomo@circular-chaos.org>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_invert_class_init (GstAudioInvertClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_invert_debug, "audioinvert", 0,
+ "audioinvert element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_invert_set_property;
gobject_class->get_property = gst_audio_invert_get_property;
0.0,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Audio inversion",
+ "Filter/Effect/Audio",
+ "Swaps upper and lower half of audio samples",
+ "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_invert_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_invert_init (GstAudioInvert * filter, GstAudioInvertClass * klass)
+gst_audio_invert_init (GstAudioInvert * filter)
{
filter->degree = 0.0;
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
GstAudioInvert *filter = GST_AUDIO_INVERT (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
-
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (filter)->format.width / 8);
+
+ filter->process (filter, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
" rate=(int)[1,MAX]," \
" channels=(int)[1,MAX]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_karaoke_debug, "audiokaraoke", 0, "audiokaraoke element");
-
-GST_BOILERPLATE_FULL (GstAudioKaraoke, gst_audio_karaoke, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioKaraoke, gst_audio_karaoke, GST_TYPE_AUDIO_FILTER);
static void gst_audio_karaoke_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_karaoke_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "AudioKaraoke",
- "Filter/Effect/Audio",
- "Removes voice from sound", "Wim Taymans <wim.taymans@gmail.com>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_karaoke_class_init (GstAudioKaraokeClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_karaoke_debug, "audiokaraoke", 0,
+ "audiokaraoke element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_karaoke_set_property;
gobject_class->get_property = gst_audio_karaoke_get_property;
"The Frequency width of the filter", 0.0, 100.0, DEFAULT_FILTER_WIDTH,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "AudioKaraoke",
+ "Filter/Effect/Audio",
+ "Removes voice from sound", "Wim Taymans <wim.taymans@gmail.com>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_karaoke_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_karaoke_init (GstAudioKaraoke * filter, GstAudioKaraokeClass * klass)
+gst_audio_karaoke_init (GstAudioKaraoke * filter)
{
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
GstAudioKaraoke *filter = GST_AUDIO_KARAOKE (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
-
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (filter)->format.width / 8);
+
+ filter->process (filter, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
"width = (int) 16, " "depth = (int) 16, " "signed = (boolean) true")
);
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_panorama_debug, "audiopanorama", 0, "audiopanorama element");
-
-GST_BOILERPLATE_FULL (GstAudioPanorama, gst_audio_panorama, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioPanorama, gst_audio_panorama, GST_TYPE_BASE_TRANSFORM);
static void gst_audio_panorama_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GValue * value, GParamSpec * pspec);
static gboolean gst_audio_panorama_get_unit_size (GstBaseTransform * base,
- GstCaps * caps, guint * size);
+ GstCaps * caps, gsize * size);
static GstCaps *gst_audio_panorama_transform_caps (GstBaseTransform * base,
- GstPadDirection direction, GstCaps * caps);
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static gboolean gst_audio_panorama_set_caps (GstBaseTransform * base,
GstCaps * incaps, GstCaps * outcaps);
/* GObject vmethod implementations */
static void
-gst_audio_panorama_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
- gst_element_class_set_details_simple (element_class, "Stereo positioning",
- "Filter/Effect/Audio",
- "Positions audio streams in the stereo panorama",
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_audio_panorama_class_init (GstAudioPanoramaClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_panorama_debug, "audiopanorama", 0,
+ "audiopanorama element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_panorama_set_property;
gobject_class->get_property = gst_audio_panorama_get_property;
GST_TYPE_AUDIO_PANORAMA_METHOD, METHOD_PSYCHOACOUSTIC,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Stereo positioning",
+ "Filter/Effect/Audio",
+ "Positions audio streams in the stereo panorama",
+ "Stefan Kost <ensonic@users.sf.net>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
GST_BASE_TRANSFORM_CLASS (klass)->get_unit_size =
GST_DEBUG_FUNCPTR (gst_audio_panorama_get_unit_size);
GST_BASE_TRANSFORM_CLASS (klass)->transform_caps =
}
static void
-gst_audio_panorama_init (GstAudioPanorama * filter,
- GstAudioPanoramaClass * klass)
+gst_audio_panorama_init (GstAudioPanorama * filter)
{
filter->panorama = 0;
static gboolean
gst_audio_panorama_get_unit_size (GstBaseTransform * base, GstCaps * caps,
- guint * size)
+ gsize * size)
{
gint width, channels;
GstStructure *structure;
static GstCaps *
gst_audio_panorama_transform_caps (GstBaseTransform * base,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
GstCaps *res;
GstStructure *structure;
GstBuffer * outbuf)
{
GstAudioPanorama *filter = GST_AUDIO_PANORAMA (base);
- guint num_samples = GST_BUFFER_SIZE (outbuf) / (2 * filter->width);
GstClockTime timestamp, stream_time;
+ guint8 *indata, *outdata;
+ gsize insize, outsize;
timestamp = GST_BUFFER_TIMESTAMP (inbuf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
+ indata = gst_buffer_map (inbuf, &insize, NULL, GST_MAP_READ);
+ outdata = gst_buffer_map (outbuf, &outsize, NULL, GST_MAP_WRITE);
+
if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
- memset (GST_BUFFER_DATA (outbuf), 0, GST_BUFFER_SIZE (outbuf));
- return GST_FLOW_OK;
+ memset (outdata, 0, outsize);
+ } else {
+ guint num_samples = outsize / (2 * filter->width);
+
+ filter->process (filter, indata, outdata, num_samples);
}
- filter->process (filter, GST_BUFFER_DATA (inbuf),
- GST_BUFFER_DATA (outbuf), num_samples);
+ gst_buffer_unmap (inbuf, indata, insize);
+ gst_buffer_unmap (outbuf, outdata, outsize);
return GST_FLOW_OK;
}
return gtype;
}
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_gst_audio_wsincband_debug, "audiowsincband", 0, \
- "Band-pass and Band-reject Windowed sinc filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioWSincBand, gst_audio_wsincband, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_FIR_FILTER, DEBUG_INIT);
+#define gst_audio_wsincband_parent_class parent_class
+G_DEFINE_TYPE (GstAudioWSincBand, gst_audio_wsincband,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
static void gst_audio_wsincband_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
#define POW2(x) (x)*(x)
-/* Element class */
-static void
-gst_audio_wsincband_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Band pass & band reject filter", "Filter/Effect/Audio",
- "Band pass and band reject windowed sinc filter",
- "Thomas Vander Stichele <thomas at apestaart dot org>, "
- "Steven W. Smith, "
- "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
static void
gst_audio_wsincband_class_init (GstAudioWSincBandClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_gst_audio_wsincband_debug, "audiowsincband", 0,
+ "Band-pass and Band-reject Windowed sinc filter plugin");
+
gobject_class->set_property = gst_audio_wsincband_set_property;
gobject_class->get_property = gst_audio_wsincband_get_property;
gobject_class->finalize = gst_audio_wsincband_finalize;
WINDOW_HAMMING,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Band pass & band reject filter", "Filter/Effect/Audio",
+ "Band pass and band reject windowed sinc filter",
+ "Thomas Vander Stichele <thomas at apestaart dot org>, "
+ "Steven W. Smith, "
+ "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_wsincband_setup);
}
static void
-gst_audio_wsincband_init (GstAudioWSincBand * self,
- GstAudioWSincBandClass * g_class)
+gst_audio_wsincband_init (GstAudioWSincBand * self)
{
self->kernel_length = 101;
self->lower_frequency = 0.0;
return gtype;
}
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_wsinclimit_debug, "audiowsinclimit", 0, \
- "Low-pass and High-pass Windowed sinc filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioWSincLimit, gst_audio_wsinclimit, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_FIR_FILTER, DEBUG_INIT);
+#define gst_audio_wsinclimit_parent_class parent_class
+G_DEFINE_TYPE (GstAudioWSincLimit, gst_audio_wsinclimit,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
static void gst_audio_wsinclimit_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
#define POW2(x) (x)*(x)
-/* Element class */
-
-static void
-gst_audio_wsinclimit_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Low pass & high pass filter", "Filter/Effect/Audio",
- "Low pass and high pass windowed sinc filter",
- "Thomas Vander Stichele <thomas at apestaart dot org>, "
- "Steven W. Smith, "
- "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
static void
gst_audio_wsinclimit_class_init (GstAudioWSincLimitClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_wsinclimit_debug, "audiowsinclimit", 0,
+ "Low-pass and High-pass Windowed sinc filter plugin");
+
gobject_class->set_property = gst_audio_wsinclimit_set_property;
gobject_class->get_property = gst_audio_wsinclimit_get_property;
gobject_class->finalize = gst_audio_wsinclimit_finalize;
WINDOW_HAMMING,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Low pass & high pass filter", "Filter/Effect/Audio",
+ "Low pass and high pass windowed sinc filter",
+ "Thomas Vander Stichele <thomas at apestaart dot org>, "
+ "Steven W. Smith, "
+ "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_wsinclimit_setup);
}
static void
-gst_audio_wsinclimit_init (GstAudioWSincLimit * self,
- GstAudioWSincLimitClass * g_class)
+gst_audio_wsinclimit_init (GstAudioWSincLimit * self)
{
self->mode = MODE_LOW_PASS;
self->window = WINDOW_HAMMING;
gboolean gst_aac_parse_event (GstBaseParse * parse, GstEvent * event);
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT (aacparse_debug, "aacparse", 0, \
- "AAC audio stream parser");
-
-GST_BOILERPLATE_FULL (GstAacParse, gst_aac_parse, GstBaseParse,
- GST_TYPE_BASE_PARSE, _do_init);
+G_DEFINE_TYPE (GstAacParse, gst_aac_parse, GST_TYPE_BASE_PARSE);
static inline gint
gst_aac_parse_get_sample_rate_from_index (guint sr_idx)
}
/**
- * gst_aac_parse_base_init:
- * @klass: #GstElementClass.
+ * gst_aac_parse_class_init:
+ * @klass: #GstAacParseClass.
*
*/
static void
-gst_aac_parse_base_init (gpointer klass)
+gst_aac_parse_class_init (GstAacParseClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (aacparse_debug, "aacparse", 0,
+ "AAC audio stream parser");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
gst_element_class_set_details_simple (element_class,
"AAC audio stream parser", "Codec/Parser/Audio",
"Advanced Audio Coding parser", "Stefan Kost <stefan.kost@nokia.com>");
-}
-
-
-/**
- * gst_aac_parse_class_init:
- * @klass: #GstAacParseClass.
- *
- */
-static void
-gst_aac_parse_class_init (GstAacParseClass * klass)
-{
- GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
parse_class->start = GST_DEBUG_FUNCPTR (gst_aac_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_aac_parse_stop);
*
*/
static void
-gst_aac_parse_init (GstAacParse * aacparse, GstAacParseClass * klass)
+gst_aac_parse_init (GstAacParse * aacparse)
{
GST_DEBUG ("initialized");
}
GstBuffer *buf = gst_value_get_buffer (value);
if (buf) {
- const guint8 *buffer = GST_BUFFER_DATA (buf);
+ guint8 *data;
+ gsize size;
guint sr_idx;
- sr_idx = ((buffer[0] & 0x07) << 1) | ((buffer[1] & 0x80) >> 7);
- aacparse->object_type = (buffer[0] & 0xf8) >> 3;
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+
+ sr_idx = ((data[0] & 0x07) << 1) | ((data[1] & 0x80) >> 7);
+ aacparse->object_type = (data[0] & 0xf8) >> 3;
aacparse->sample_rate = gst_aac_parse_get_sample_rate_from_index (sr_idx);
- aacparse->channels = (buffer[1] & 0x78) >> 3;
+ aacparse->channels = (data[1] & 0x78) >> 3;
aacparse->header_type = DSPAAC_HEADER_NONE;
aacparse->mpegversion = 4;
+ gst_buffer_unmap (buf, data, size);
GST_DEBUG ("codec_data: object_type=%d, sample_rate=%d, channels=%d",
aacparse->object_type, aacparse->sample_rate, aacparse->channels);
int skip_size = 0;
int bitstream_type;
int sr_idx;
+ GstCaps *sinkcaps;
aacparse->header_type = DSPAAC_HEADER_ADIF;
aacparse->mpegversion = 4;
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse), 512);
/* arrange for metadata and get out of the way */
- gst_aac_parse_set_src_caps (aacparse,
- GST_PAD_CAPS (GST_BASE_PARSE_SINK_PAD (aacparse)));
+ sinkcaps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (aacparse));
+ gst_aac_parse_set_src_caps (aacparse, sinkcaps);
+ gst_caps_unref (sinkcaps);
/* not syncable, not easily seekable (unless we push data from start */
gst_base_parse_set_syncable (GST_BASE_PARSE_CAST (aacparse), FALSE);
gst_aac_parse_check_valid_frame (GstBaseParse * parse,
GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
- const guint8 *data;
+ guint8 *data;
+ gsize size;
GstAacParse *aacparse;
gboolean ret = FALSE;
gboolean lost_sync;
aacparse = GST_AAC_PARSE (parse);
buffer = frame->buffer;
- data = GST_BUFFER_DATA (buffer);
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);
if (aacparse->header_type == DSPAAC_HEADER_ADIF ||
aacparse->header_type == DSPAAC_HEADER_NONE) {
/* There is nothing to parse */
- *framesize = GST_BUFFER_SIZE (buffer);
+ *framesize = size;
ret = TRUE;
} else if (aacparse->header_type == DSPAAC_HEADER_NOT_PARSED || lost_sync) {
- ret = gst_aac_parse_detect_stream (aacparse, data, GST_BUFFER_SIZE (buffer),
+ ret = gst_aac_parse_detect_stream (aacparse, data, size,
GST_BASE_PARSE_DRAINING (parse), framesize, skipsize);
} else if (aacparse->header_type == DSPAAC_HEADER_ADTS) {
guint needed_data = 1024;
- ret = gst_aac_parse_check_adts_frame (aacparse, data,
- GST_BUFFER_SIZE (buffer), GST_BASE_PARSE_DRAINING (parse),
- framesize, &needed_data);
+ ret = gst_aac_parse_check_adts_frame (aacparse, data, size,
+ GST_BASE_PARSE_DRAINING (parse), framesize, &needed_data);
if (!ret) {
GST_DEBUG ("buffer didn't contain valid frame");
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
ADTS_MAX_SIZE);
}
+ gst_buffer_unmap (buffer, data, size);
return ret;
}
GstBuffer *buffer;
GstFlowReturn ret = GST_FLOW_OK;
gint rate, channels;
+ guint8 *data;
+ gsize size;
aacparse = GST_AAC_PARSE (parse);
buffer = frame->buffer;
/* see above */
frame->overhead = 7;
- gst_aac_parse_parse_adts_header (aacparse, GST_BUFFER_DATA (buffer),
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+ gst_aac_parse_parse_adts_header (aacparse, data,
&rate, &channels, NULL, NULL);
+ gst_buffer_unmap (buffer, data, size);
+
GST_LOG_OBJECT (aacparse, "rate: %d, chans: %d", rate, channels);
if (G_UNLIKELY (rate != aacparse->sample_rate
|| channels != aacparse->channels)) {
+ GstCaps *sinkcaps;
+
aacparse->sample_rate = rate;
aacparse->channels = channels;
-    if (!gst_aac_parse_set_src_caps (aacparse,
-            GST_PAD_CAPS (GST_BASE_PARSE (aacparse)->sinkpad))) {
+    sinkcaps = gst_pad_get_current_caps (GST_BASE_PARSE (aacparse)->sinkpad);
+    if (!gst_aac_parse_set_src_caps (aacparse, sinkcaps)) {
       /* If linking fails, we need to return appropriate error */
       ret = GST_FLOW_NOT_LINKED;
     }
+    gst_caps_unref (sinkcaps);
gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse),
aacparse->sample_rate, 1024, 2, 2);
static GstFlowReturn gst_ac3_parse_parse_frame (GstBaseParse * parse,
GstBaseParseFrame * frame);
-GST_BOILERPLATE (GstAc3Parse, gst_ac3_parse, GstBaseParse, GST_TYPE_BASE_PARSE);
+#define gst_ac3_parse_parent_class parent_class
+G_DEFINE_TYPE (GstAc3Parse, gst_ac3_parse, GST_TYPE_BASE_PARSE);
static void
-gst_ac3_parse_base_init (gpointer klass)
+gst_ac3_parse_class_init (GstAc3ParseClass * klass)
{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (ac3_parse_debug, "ac3parse", 0,
+ "AC3 audio stream parser");
+
+ object_class->finalize = gst_ac3_parse_finalize;
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
gst_element_class_set_details_simple (element_class,
"AC3 audio stream parser", "Codec/Parser/Audio",
"AC3 parser", "Tim-Philipp Müller <tim centricular net>");
-}
-
-static void
-gst_ac3_parse_class_init (GstAc3ParseClass * klass)
-{
- GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
- GObjectClass *object_class = G_OBJECT_CLASS (klass);
-
- GST_DEBUG_CATEGORY_INIT (ac3_parse_debug, "ac3parse", 0,
- "AC3 audio stream parser");
-
- object_class->finalize = gst_ac3_parse_finalize;
parse_class->start = GST_DEBUG_FUNCPTR (gst_ac3_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_ac3_parse_stop);
}
static void
-gst_ac3_parse_init (GstAc3Parse * ac3parse, GstAc3ParseClass * klass)
+gst_ac3_parse_init (GstAc3Parse * ac3parse)
{
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (ac3parse), 6);
gst_ac3_parse_reset (ac3parse);
gst_ac3_parse_frame_header_ac3 (GstAc3Parse * ac3parse, GstBuffer * buf,
guint * frame_size, guint * rate, guint * chans, guint * blks, guint * sid)
{
- GstBitReader bits = GST_BIT_READER_INIT_FROM_BUFFER (buf);
+ GstBitReader bits;
+ gpointer data;
+ gsize size;
guint8 fscod, frmsizcod, bsid, acmod, lfe_on;
+ gboolean ret = FALSE;
GST_LOG_OBJECT (ac3parse, "parsing ac3");
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ gst_bit_reader_init (&bits, data, size);
+
gst_bit_reader_skip_unchecked (&bits, 16 + 16);
fscod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2);
frmsizcod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 6);
if (G_UNLIKELY (fscod == 3 || frmsizcod >= G_N_ELEMENTS (frmsizcod_table))) {
GST_DEBUG_OBJECT (ac3parse, "bad fscod=%d frmsizcod=%d", fscod, frmsizcod);
- return FALSE;
+ goto cleanup;
}
bsid = gst_bit_reader_get_bits_uint8_unchecked (&bits, 5);
* but seemingly only defines 6 and 8 cases */
if (bsid > 8) {
GST_DEBUG_OBJECT (ac3parse, "unexpected bsid=%d", bsid);
- return FALSE;
+ goto cleanup;
} else if (bsid != 8 && bsid != 6) {
GST_DEBUG_OBJECT (ac3parse, "undefined bsid=%d", bsid);
}
if (sid)
*sid = 0;
- return TRUE;
+ ret = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, data, size);
+
+ return ret;
}
static gboolean
gst_ac3_parse_frame_header_eac3 (GstAc3Parse * ac3parse, GstBuffer * buf,
guint * frame_size, guint * rate, guint * chans, guint * blks, guint * sid)
{
- GstBitReader bits = GST_BIT_READER_INIT_FROM_BUFFER (buf);
+ GstBitReader bits;
+ gpointer data;
+ gsize size;
guint16 frmsiz, sample_rate, blocks;
guint8 strmtyp, fscod, fscod2, acmod, lfe_on, strmid, numblkscod;
+ gboolean ret = FALSE;
GST_LOG_OBJECT (ac3parse, "parsing e-ac3");
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ gst_bit_reader_init (&bits, data, size);
+
gst_bit_reader_skip_unchecked (&bits, 16);
strmtyp = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2); /* strmtyp */
if (G_UNLIKELY (strmtyp == 3)) {
GST_DEBUG_OBJECT (ac3parse, "bad strmtyp %d", strmtyp);
- return FALSE;
+ goto cleanup;
}
strmid = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3); /* substreamid */
fscod2 = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2); /* fscod2 */
if (G_UNLIKELY (fscod2 == 3)) {
GST_DEBUG_OBJECT (ac3parse, "invalid fscod2");
- return FALSE;
+ goto cleanup;
}
sample_rate = fscod_rates[fscod2] / 2;
blocks = 6;
if (sid)
*sid = (strmtyp & 0x1) << 3 | strmid;
- return TRUE;
+ ret = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, data, size);
+
+ return ret;
}
static gboolean
guint * framesize, guint * rate, guint * chans, guint * blocks,
guint * sid, gboolean * eac)
{
- GstBitReader bits = GST_BIT_READER_INIT_FROM_BUFFER (buf);
+ GstBitReader bits;
guint16 sync;
guint8 bsid;
+ gpointer data;
+ gsize size;
+ gboolean ret = FALSE;
- GST_MEMDUMP_OBJECT (parse, "AC3 frame sync", GST_BUFFER_DATA (buf), 16);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ gst_bit_reader_init (&bits, data, size);
+
+ GST_MEMDUMP_OBJECT (parse, "AC3 frame sync", data, MIN (size, 16));
sync = gst_bit_reader_get_bits_uint16_unchecked (&bits, 16);
gst_bit_reader_skip_unchecked (&bits, 16 + 8);
bsid = gst_bit_reader_peek_bits_uint8_unchecked (&bits, 5);
if (G_UNLIKELY (sync != 0x0b77))
- return FALSE;
+ goto cleanup;
GST_LOG_OBJECT (parse, "bsid = %d", bsid);
if (bsid <= 10) {
if (eac)
*eac = FALSE;
- return gst_ac3_parse_frame_header_ac3 (parse, buf, framesize, rate, chans,
+ ret = gst_ac3_parse_frame_header_ac3 (parse, buf, framesize, rate, chans,
blocks, sid);
- } else if (bsid <= 16) {
+ goto cleanup;
+ }
+
+ if (bsid <= 16) {
if (eac)
*eac = TRUE;
- return gst_ac3_parse_frame_header_eac3 (parse, buf, framesize, rate, chans,
+ ret = gst_ac3_parse_frame_header_eac3 (parse, buf, framesize, rate, chans,
blocks, sid);
- } else {
- GST_DEBUG_OBJECT (parse, "unexpected bsid %d", bsid);
- return FALSE;
+ goto cleanup;
}
+
+ GST_DEBUG_OBJECT (parse, "unexpected bsid %d", bsid);
+
+cleanup:
+ gst_buffer_unmap (buf, data, size);
+
+ return ret;
}
static gboolean
{
GstAc3Parse *ac3parse = GST_AC3_PARSE (parse);
GstBuffer *buf = frame->buffer;
- GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buf);
+ GstByteReader reader;
gint off;
gboolean lost_sync, draining;
+ gpointer data;
+ gsize size;
+ gboolean ret = FALSE;
- if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < 6))
- return FALSE;
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ gst_byte_reader_init (&reader, data, size);
+
+ if (G_UNLIKELY (size < 6))
+ goto cleanup;
off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffff0000, 0x0b770000,
- 0, GST_BUFFER_SIZE (buf));
+ 0, size);
GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);
/* didn't find anything that looks like a sync word, skip */
if (off < 0) {
- *skipsize = GST_BUFFER_SIZE (buf) - 3;
- return FALSE;
+ *skipsize = size - 3;
+ goto cleanup;
}
/* possible frame header, but not at offset 0? skip bytes before sync */
if (off > 0) {
*skipsize = off;
- return FALSE;
+ goto cleanup;
}
/* make sure the values in the frame header look sane */
if (!gst_ac3_parse_frame_header (ac3parse, buf, framesize, NULL, NULL,
NULL, NULL, NULL)) {
*skipsize = off + 2;
- return FALSE;
+ goto cleanup;
}
GST_LOG_OBJECT (parse, "got frame");
GST_DEBUG_OBJECT (ac3parse, "... but not sufficient data");
gst_base_parse_set_min_frame_size (parse, *framesize + 6);
*skipsize = 0;
- return FALSE;
+ goto cleanup;
} else {
if (word != 0x0b77) {
GST_DEBUG_OBJECT (ac3parse, "0x%x not OK", word);
*skipsize = off + 2;
- return FALSE;
+ goto cleanup;
} else {
/* ok, got sync now, let's assume constant frame size */
gst_base_parse_set_min_frame_size (parse, *framesize);
}
}
- return TRUE;
+ ret = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, data, size);
+
+ return ret;
}
static GstFlowReturn
GstCaps *caps = gst_caps_new_simple (eac ? "audio/x-eac3" : "audio/x-ac3",
"framed", G_TYPE_BOOLEAN, TRUE, "rate", G_TYPE_INT, rate,
"channels", G_TYPE_INT, chans, NULL);
- gst_buffer_set_caps (buf, caps);
gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
gst_caps_unref (caps);
GstFlowReturn gst_amr_parse_parse_frame (GstBaseParse * parse,
GstBaseParseFrame * frame);
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT (amrparse_debug, "amrparse", 0, \
- "AMR-NB audio stream parser");
-
-GST_BOILERPLATE_FULL (GstAmrParse, gst_amr_parse, GstBaseParse,
- GST_TYPE_BASE_PARSE, _do_init);
-
+G_DEFINE_TYPE (GstAmrParse, gst_amr_parse, GST_TYPE_BASE_PARSE);
/**
- * gst_amr_parse_base_init:
- * @klass: #GstElementClass.
+ * gst_amr_parse_class_init:
+ * @klass: GstAmrParseClass.
*
*/
static void
-gst_amr_parse_base_init (gpointer klass)
+gst_amr_parse_class_init (GstAmrParseClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (amrparse_debug, "amrparse", 0,
+ "AMR-NB audio stream parser");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
"AMR audio stream parser", "Codec/Parser/Audio",
"Adaptive Multi-Rate audio parser",
"Ronald Bultje <rbultje@ronald.bitfreak.net>");
-}
-
-
-/**
- * gst_amr_parse_class_init:
- * @klass: GstAmrParseClass.
- *
- */
-static void
-gst_amr_parse_class_init (GstAmrParseClass * klass)
-{
- GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
parse_class->start = GST_DEBUG_FUNCPTR (gst_amr_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_amr_parse_stop);
*
*/
static void
-gst_amr_parse_init (GstAmrParse * amrparse, GstAmrParseClass * klass)
+gst_amr_parse_init (GstAmrParse * amrparse)
{
/* init rest */
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 62);
GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
GstBuffer *buffer;
- const guint8 *data;
+ guint8 *data;
+ gsize size;
gint fsize, mode, dsize;
GstAmrParse *amrparse;
+ gboolean ret = FALSE;
amrparse = GST_AMR_PARSE (parse);
buffer = frame->buffer;
- data = GST_BUFFER_DATA (buffer);
- dsize = GST_BUFFER_SIZE (buffer);
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+ dsize = size;
GST_LOG ("buffer: %d bytes", dsize);
}
/* We return FALSE, so this frame won't get pushed forward. Instead,
the "skip" value is set, so next time we will receive a valid frame. */
- return FALSE;
+ goto done;
}
/* Does this look like a possible frame header candidate? */
(!GST_BASE_PARSE_LOST_SYNC (parse) || GST_BASE_PARSE_DRAINING (parse)
|| (dsize > fsize && (data[fsize] & 0x83) == 0))) {
*framesize = fsize;
- return TRUE;
+ ret = TRUE;
+ goto done;
}
}
-
GST_LOG ("sync lost");
- return FALSE;
+
+done:
+ gst_buffer_unmap (buffer, data, size);
+
+ return ret;
}
static GstFlowReturn gst_dca_parse_parse_frame (GstBaseParse * parse,
GstBaseParseFrame * frame);
-GST_BOILERPLATE (GstDcaParse, gst_dca_parse, GstBaseParse, GST_TYPE_BASE_PARSE);
-
-static void
-gst_dca_parse_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_template));
-
- gst_element_class_set_details_simple (element_class,
- "DTS Coherent Acoustics audio stream parser", "Codec/Parser/Audio",
- "DCA parser", "Tim-Philipp Müller <tim centricular net>");
-}
+#define gst_dca_parse_parent_class parent_class
+G_DEFINE_TYPE (GstDcaParse, gst_dca_parse, GST_TYPE_BASE_PARSE);
static void
gst_dca_parse_class_init (GstDcaParseClass * klass)
{
GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GObjectClass *object_class = G_OBJECT_CLASS (klass);
GST_DEBUG_CATEGORY_INIT (dca_parse_debug, "dcaparse", 0,
parse_class->check_valid_frame =
GST_DEBUG_FUNCPTR (gst_dca_parse_check_valid_frame);
parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_dca_parse_parse_frame);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (element_class,
+ "DTS Coherent Acoustics audio stream parser", "Codec/Parser/Audio",
+ "DCA parser", "Tim-Philipp Müller <tim centricular net>");
}
static void
}
static void
-gst_dca_parse_init (GstDcaParse * dcaparse, GstDcaParseClass * klass)
+gst_dca_parse_init (GstDcaParse * dcaparse)
{
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (dcaparse),
DCA_MIN_FRAMESIZE);
static gint
gst_dca_parse_find_sync (GstDcaParse * dcaparse, GstByteReader * reader,
- const GstBuffer * buf, guint32 * sync)
+ gsize bufsize, guint32 * sync)
{
guint32 best_sync = 0;
guint best_offset = G_MAXUINT;
/* Raw little endian */
off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0xfe7f0180,
- 0, GST_BUFFER_SIZE (buf));
+ 0, bufsize);
if (off >= 0 && off < best_offset) {
best_offset = off;
best_sync = 0xfe7f0180;
/* Raw big endian */
off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0x7ffe8001,
- 0, GST_BUFFER_SIZE (buf));
+ 0, bufsize);
if (off >= 0 && off < best_offset) {
best_offset = off;
best_sync = 0x7ffe8001;
/* 14-bit little endian */
off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0xff1f00e8,
- 0, GST_BUFFER_SIZE (buf));
+ 0, bufsize);
if (off >= 0 && off < best_offset) {
best_offset = off;
best_sync = 0xff1f00e8;
/* 14-bit big endian */
off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0x1fffe800,
- 0, GST_BUFFER_SIZE (buf));
+ 0, bufsize);
if (off >= 0 && off < best_offset) {
best_offset = off;
best_sync = 0x1fffe800;
{
GstDcaParse *dcaparse = GST_DCA_PARSE (parse);
GstBuffer *buf = frame->buffer;
- GstByteReader r = GST_BYTE_READER_INIT_FROM_BUFFER (buf);
+ GstByteReader r;
gboolean parser_draining;
gboolean parser_in_sync;
gboolean terminator;
guint32 sync = 0;
guint size, rate, chans, num_blocks, samples_per_block;
gint off = -1;
+ gpointer data;
+ gsize bufsize;
+ gboolean ret = FALSE;
- if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < 16))
- return FALSE;
+ data = gst_buffer_map (buf, &bufsize, NULL, GST_MAP_READ);
+
+ if (G_UNLIKELY (bufsize < 16))
+ goto cleanup;
parser_in_sync = !GST_BASE_PARSE_LOST_SYNC (parse);
+ gst_byte_reader_init (&r, data, bufsize);
+
if (G_LIKELY (parser_in_sync && dcaparse->last_sync != 0)) {
off = gst_byte_reader_masked_scan_uint32 (&r, 0xffffffff,
- dcaparse->last_sync, 0, GST_BUFFER_SIZE (buf));
+ dcaparse->last_sync, 0, bufsize);
}
if (G_UNLIKELY (off < 0)) {
- off = gst_dca_parse_find_sync (dcaparse, &r, buf, &sync);
+ off = gst_dca_parse_find_sync (dcaparse, &r, bufsize, &sync);
}
/* didn't find anything that looks like a sync word, skip */
if (off < 0) {
- *skipsize = GST_BUFFER_SIZE (buf) - 3;
+ *skipsize = bufsize - 3;
GST_DEBUG_OBJECT (dcaparse, "no sync, skipping %d bytes", *skipsize);
- return FALSE;
+ goto cleanup;
}
GST_LOG_OBJECT (parse, "possible sync %08x at buffer offset %d", sync, off);
/* possible frame header, but not at offset 0? skip bytes before sync */
if (off > 0) {
*skipsize = off;
- return FALSE;
+ goto cleanup;
}
/* make sure the values in the frame header look sane */
if (!gst_dca_parse_parse_header (dcaparse, &r, &size, &rate, &chans, NULL,
NULL, &num_blocks, &samples_per_block, &terminator)) {
*skipsize = 4;
- return FALSE;
+ goto cleanup;
}
GST_LOG_OBJECT (parse, "got frame, sync %08x, size %u, rate %d, channels %d",
if (!parser_in_sync && !parser_draining) {
/* check for second frame to be sure */
GST_DEBUG_OBJECT (dcaparse, "resyncing; checking next frame syncword");
- if (GST_BUFFER_SIZE (buf) >= (size + 16)) {
+ if (bufsize >= (size + 16)) {
guint s2, r2, c2, n2, s3;
gboolean t;
- GST_MEMDUMP ("buf", GST_BUFFER_DATA (buf), size + 16);
- gst_byte_reader_init_from_buffer (&r, buf);
+ GST_MEMDUMP ("buf", data, size + 16);
+ gst_byte_reader_init (&r, data, bufsize);
gst_byte_reader_skip_unchecked (&r, size);
if (!gst_dca_parse_parse_header (dcaparse, &r, &s2, &r2, &c2, NULL, NULL,
&n2, &s3, &t)) {
GST_DEBUG_OBJECT (dcaparse, "didn't find second syncword");
*skipsize = 4;
- return FALSE;
+ goto cleanup;
}
/* ok, got sync now, let's assume constant frame size */
/* FIXME: baseparse always seems to hand us buffers of min_frame_size
* bytes, which is unhelpful here */
GST_LOG_OBJECT (dcaparse, "next sync out of reach (%u < %u)",
- GST_BUFFER_SIZE (buf), size + 16);
+ bufsize, size + 16);
/* *skipsize = 0; */
/* return FALSE; */
}
}
- return TRUE;
+ ret = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, data, bufsize);
+ return ret;
}
static GstFlowReturn
{
GstDcaParse *dcaparse = GST_DCA_PARSE (parse);
GstBuffer *buf = frame->buffer;
- GstByteReader r = GST_BYTE_READER_INIT_FROM_BUFFER (buf);
+ GstByteReader r;
guint size, rate, chans, depth, block_size, num_blocks, samples_per_block;
gint endianness;
gboolean terminator;
+ gpointer data;
+ gsize bufsize;
+
+ data = gst_buffer_map (buf, &bufsize, NULL, GST_MAP_READ);
+ gst_byte_reader_init (&r, data, bufsize);
if (!gst_dca_parse_parse_header (dcaparse, &r, &size, &rate, &chans, &depth,
&endianness, &num_blocks, &samples_per_block, &terminator))
"endianness", G_TYPE_INT, endianness, "depth", G_TYPE_INT, depth,
"block-size", G_TYPE_INT, block_size, "frame-size", G_TYPE_INT, size,
NULL);
- gst_buffer_set_caps (buf, caps);
gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
gst_caps_unref (caps);
gst_base_parse_set_frame_rate (parse, rate, block_size, 0, 0);
}
+ gst_buffer_unmap (buf, data, bufsize);
return GST_FLOW_OK;
/* ERRORS */
{
/* this really shouldn't ever happen */
GST_ELEMENT_ERROR (parse, STREAM, DECODE, (NULL), (NULL));
+ gst_buffer_unmap (buf, data, bufsize);
return GST_FLOW_ERROR;
}
}
GstFormat src_format, gint64 src_value, GstFormat dest_format,
gint64 * dest_value);
-GST_BOILERPLATE (GstFlacParse, gst_flac_parse, GstBaseParse,
- GST_TYPE_BASE_PARSE);
-
-static void
-gst_flac_parse_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_factory));
-
- gst_element_class_set_details_simple (element_class, "FLAC audio parser",
- "Codec/Parser/Audio",
- "Parses audio with the FLAC lossless audio codec",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- GST_DEBUG_CATEGORY_INIT (flacparse_debug, "flacparse", 0,
- "Flac parser element");
-}
+#define gst_flac_parse_parent_class parent_class
+G_DEFINE_TYPE (GstFlacParse, gst_flac_parse, GST_TYPE_BASE_PARSE);
static void
gst_flac_parse_class_init (GstFlacParseClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseParseClass *baseparse_class = GST_BASE_PARSE_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (flacparse_debug, "flacparse", 0,
+ "Flac parser element");
+
gobject_class->finalize = gst_flac_parse_finalize;
gobject_class->set_property = gst_flac_parse_set_property;
gobject_class->get_property = gst_flac_parse_get_property;
baseparse_class->pre_push_frame =
GST_DEBUG_FUNCPTR (gst_flac_parse_pre_push_frame);
baseparse_class->convert = GST_DEBUG_FUNCPTR (gst_flac_parse_convert);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (element_class, "FLAC audio parser",
+ "Codec/Parser/Audio",
+ "Parses audio with the FLAC lossless audio codec",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
static void
-gst_flac_parse_init (GstFlacParse * flacparse, GstFlacParseClass * klass)
+gst_flac_parse_init (GstFlacParse * flacparse)
{
flacparse->check_frame_checksums = DEFAULT_CHECK_FRAME_CHECKSUMS;
}
GstBaseParseFrame * frame, guint * ret)
{
GstBuffer *buffer;
- const guint8 *data;
- guint max, size, remaining;
+ guint8 *data;
+ gsize size;
+ guint max, remaining;
guint i, search_start, search_end;
FrameHeaderCheckReturn header_ret;
guint16 block_size;
+ gboolean result = FALSE;
buffer = frame->buffer;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size <= flacparse->min_framesize)
goto need_more;
&block_size);
if (header_ret == FRAME_HEADER_INVALID) {
*ret = 0;
- return FALSE;
- } else if (header_ret == FRAME_HEADER_MORE_DATA) {
- goto need_more;
+ goto cleanup;
}
+ if (header_ret == FRAME_HEADER_MORE_DATA)
+ goto need_more;
/* mind unknown framesize */
search_start = MAX (2, flacparse->min_framesize);
}
*ret = i;
flacparse->block_size = block_size;
- return TRUE;
+ result = TRUE;
+ goto cleanup;
} else if (header_ret == FRAME_HEADER_MORE_DATA) {
goto need_more;
}
if (actual_crc == expected_crc) {
*ret = size;
flacparse->block_size = block_size;
- return TRUE;
+ result = TRUE;
+ goto cleanup;
}
} else {
*ret = size;
flacparse->block_size = block_size;
- return TRUE;
+ result = TRUE;
+ goto cleanup;
}
}
if (max == 16)
max = 1 << 24;
*ret = MIN (size + 4096, max);
- return FALSE;
+ result = FALSE;
+
+cleanup:
+ gst_buffer_unmap (buffer, data, size);
+ return result;
}
static gboolean
{
GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
GstBuffer *buffer = frame->buffer;
- const guint8 *data = GST_BUFFER_DATA (buffer);
+ guint8 *data;
+ gsize bufsize;
+ gboolean result = TRUE;
- if (G_UNLIKELY (GST_BUFFER_SIZE (buffer) < 4))
- return FALSE;
+ data = gst_buffer_map (buffer, &bufsize, NULL, GST_MAP_READ);
+
+ if (G_UNLIKELY (bufsize < 4)) {
+ result = FALSE;
+ goto cleanup;
+ }
if (flacparse->state == GST_FLAC_PARSE_STATE_INIT) {
- if (memcmp (GST_BUFFER_DATA (buffer), "fLaC", 4) == 0) {
+ if (memcmp (data, "fLaC", 4) == 0) {
GST_DEBUG_OBJECT (flacparse, "fLaC marker found");
*framesize = 4;
- return TRUE;
- } else if (data[0] == 0xff && (data[1] >> 2) == 0x3e) {
+ goto cleanup;
+ }
+ if (data[0] == 0xff && (data[1] >> 2) == 0x3e) {
GST_DEBUG_OBJECT (flacparse, "Found headerless FLAC");
/* Minimal size of a frame header */
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), 9);
flacparse->state = GST_FLAC_PARSE_STATE_GENERATE_HEADERS;
*skipsize = 0;
- return FALSE;
- } else {
- GST_DEBUG_OBJECT (flacparse, "fLaC marker not found");
- return FALSE;
+ result = FALSE;
+ goto cleanup;
}
- } else if (flacparse->state == GST_FLAC_PARSE_STATE_HEADERS) {
+ GST_DEBUG_OBJECT (flacparse, "fLaC marker not found");
+ result = FALSE;
+ goto cleanup;
+ }
+
+ if (flacparse->state == GST_FLAC_PARSE_STATE_HEADERS) {
guint size = 4 + ((data[1] << 16) | (data[2] << 8) | (data[3]));
GST_DEBUG_OBJECT (flacparse, "Found metadata block of size %u", size);
*framesize = size;
- return TRUE;
- } else {
- if ((GST_READ_UINT16_BE (data) & 0xfffe) == 0xfff8) {
- gboolean ret;
- guint next;
+ goto cleanup;
+ }
- flacparse->offset = GST_BUFFER_OFFSET (buffer);
- flacparse->blocking_strategy = 0;
- flacparse->block_size = 0;
- flacparse->sample_number = 0;
-
- GST_DEBUG_OBJECT (flacparse, "Found sync code");
- ret = gst_flac_parse_frame_is_valid (flacparse, frame, &next);
- if (ret) {
- *framesize = next;
- return TRUE;
- } else {
- /* If we're at EOS and the frame was not valid, drop it! */
- if (G_UNLIKELY (GST_BASE_PARSE_DRAINING (flacparse))) {
- GST_WARNING_OBJECT (flacparse, "EOS");
- return FALSE;
- }
+ if ((GST_READ_UINT16_BE (data) & 0xfffe) == 0xfff8) {
+ gboolean ret;
+ guint next;
- if (next == 0) {
- } else if (next > GST_BUFFER_SIZE (buffer)) {
- GST_DEBUG_OBJECT (flacparse, "Requesting %u bytes", next);
- *skipsize = 0;
- gst_base_parse_set_min_frame_size (parse, next);
- return FALSE;
- } else {
- GST_ERROR_OBJECT (flacparse,
- "Giving up on invalid frame (%d bytes)",
- GST_BUFFER_SIZE (buffer));
- return FALSE;
- }
- }
- } else {
- GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buffer);
- gint off;
+ flacparse->offset = GST_BUFFER_OFFSET (buffer);
+ flacparse->blocking_strategy = 0;
+ flacparse->block_size = 0;
+ flacparse->sample_number = 0;
- off =
- gst_byte_reader_masked_scan_uint32 (&reader, 0xfffc0000, 0xfff80000,
- 0, GST_BUFFER_SIZE (buffer));
+ GST_DEBUG_OBJECT (flacparse, "Found sync code");
+ ret = gst_flac_parse_frame_is_valid (flacparse, frame, &next);
+ if (ret) {
+ *framesize = next;
+ goto cleanup;
+ } else {
+ /* If we're at EOS and the frame was not valid, drop it! */
+ if (G_UNLIKELY (GST_BASE_PARSE_DRAINING (flacparse))) {
+ GST_WARNING_OBJECT (flacparse, "EOS");
+ result = FALSE;
+ goto cleanup;
+ }
- if (off > 0) {
- GST_DEBUG_OBJECT (parse, "Possible sync at buffer offset %d", off);
- *skipsize = off;
- return FALSE;
+ if (next == 0) {
+ } else if (next > bufsize) {
+ GST_DEBUG_OBJECT (flacparse, "Requesting %u bytes", next);
+ *skipsize = 0;
+ gst_base_parse_set_min_frame_size (parse, next);
+ result = FALSE;
+ goto cleanup;
} else {
- GST_DEBUG_OBJECT (flacparse, "Sync code not found");
- *skipsize = GST_BUFFER_SIZE (buffer) - 3;
- return FALSE;
+ GST_ERROR_OBJECT (flacparse,
+ "Giving up on invalid frame (%d bytes)", bufsize);
+ result = FALSE;
+ goto cleanup;
}
}
+ } else {
+ GstByteReader reader;
+ gint off;
+
+ gst_byte_reader_init (&reader, data, bufsize);
+ off =
+ gst_byte_reader_masked_scan_uint32 (&reader, 0xfffc0000, 0xfff80000,
+ 0, bufsize);
+
+ if (off > 0) {
+ GST_DEBUG_OBJECT (parse, "Possible sync at buffer offset %d", off);
+ *skipsize = off;
+ result = FALSE;
+ goto cleanup;
+ } else {
+ GST_DEBUG_OBJECT (flacparse, "Sync code not found");
+ *skipsize = bufsize - 3;
+ result = FALSE;
+ goto cleanup;
+ }
}
- return FALSE;
+ result = FALSE;
+
+cleanup:
+ gst_buffer_unmap (buffer, data, bufsize);
+ return result;
}
static gboolean
gst_flac_parse_handle_streaminfo (GstFlacParse * flacparse, GstBuffer * buffer)
{
- GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+ GstBitReader reader;
+ guint8 *data;
+ gsize size;
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+ gst_bit_reader_init (&reader, data, size);
- if (GST_BUFFER_SIZE (buffer) != 4 + 34) {
+ if (size != 4 + 34) {
GST_ERROR_OBJECT (flacparse, "Invalid metablock size for STREAMINFO: %u",
- GST_BUFFER_SIZE (buffer));
- return FALSE;
+ size);
+ goto failure;
}
/* Skip metadata block header */
goto error;
if (flacparse->samplerate == 0) {
GST_ERROR_OBJECT (flacparse, "Invalid sample rate 0");
- return FALSE;
+ goto failure;
}
if (!gst_bit_reader_get_bits_uint8 (&reader, &flacparse->channels, 3))
if (flacparse->channels > 8) {
GST_ERROR_OBJECT (flacparse, "Invalid number of channels %u",
flacparse->channels);
- return FALSE;
+ goto failure;
}
if (!gst_bit_reader_get_bits_uint8 (&reader, &flacparse->bps, 5))
GST_FORMAT_DEFAULT, flacparse->total_samples, 0);
}
+ gst_buffer_unmap (buffer, data, size);
+
GST_DEBUG_OBJECT (flacparse, "STREAMINFO:\n"
"\tmin/max blocksize: %u/%u,\n"
"\tmin/max framesize: %u/%u,\n"
error:
GST_ERROR_OBJECT (flacparse, "Failed to read data");
+failure:
+ gst_buffer_unmap (buffer, data, size);
return FALSE;
}
gst_flac_parse_handle_vorbiscomment (GstFlacParse * flacparse,
GstBuffer * buffer)
{
- flacparse->tags = gst_tag_list_from_vorbiscomment_buffer (buffer,
- GST_BUFFER_DATA (buffer), 4, NULL);
+ guint8 *data;
+ gsize size;
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
+ flacparse->tags = gst_tag_list_from_vorbiscomment (data, size, data, 4, NULL);
+ gst_buffer_unmap (buffer, data, size);
if (flacparse->tags == NULL) {
GST_ERROR_OBJECT (flacparse, "Invalid vorbiscomment block");
static gboolean
gst_flac_parse_handle_picture (GstFlacParse * flacparse, GstBuffer * buffer)
{
- GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buffer);
- const guint8 *data = GST_BUFFER_DATA (buffer);
+ GstByteReader reader;
+ guint8 *data;
+ gsize bufsize;
guint32 img_len = 0, img_type = 0;
guint32 img_mimetype_len = 0, img_description_len = 0;
+ data = gst_buffer_map (buffer, &bufsize, NULL, GST_MAP_READ);
+ gst_byte_reader_init (&reader, data, bufsize);
+
if (!gst_byte_reader_skip (&reader, 4))
goto error;
flacparse->tags = NULL;
}
+ gst_buffer_unmap (buffer, data, bufsize);
return TRUE;
error:
GST_ERROR_OBJECT (flacparse, "Error reading data");
+ gst_buffer_unmap (buffer, data, bufsize);
return FALSE;
}
{
GstByteReader br;
gint64 offset = 0, samples = 0;
+ gpointer data;
+ gsize bufsize;
GST_DEBUG_OBJECT (flacparse,
"parsing seektable; base offset %" G_GINT64_FORMAT, boffset);
if (boffset <= 0)
- goto done;
+ goto exit;
+
+ data = gst_buffer_map (flacparse->seektable, &bufsize, NULL, GST_MAP_READ);
+ gst_byte_reader_init (&br, data, bufsize);
- gst_byte_reader_init_from_buffer (&br, flacparse->seektable);
/* skip header */
if (!gst_byte_reader_skip (&br, 4))
goto done;
}
done:
+ gst_buffer_unmap (flacparse->seektable, data, bufsize);
+exit:
gst_buffer_unref (flacparse->seektable);
flacparse->seektable = NULL;
}
for (l = flacparse->headers; l; l = l->next) {
GstBuffer *header = l->data;
- const guint8 *data = GST_BUFFER_DATA (header);
- guint size = GST_BUFFER_SIZE (header);
+ guint8 *data;
+ gsize size;
+
+ data = gst_buffer_map (header, &size, NULL, GST_MAP_READ);
GST_BUFFER_FLAG_SET (header, GST_BUFFER_FLAG_IN_CAPS);
} else if (size > 1 && (data[0] & 0x7f) == 4) {
vorbiscomment = header;
}
+
+ gst_buffer_unmap (header, data, size);
}
if (marker == NULL || streaminfo == NULL || vorbiscomment == NULL) {
{
GstBuffer *buf;
guint16 num;
+ guint8 *sinfodata, *writedata;
+ gsize sinfosize, writesize;
+
+ sinfodata = gst_buffer_map (streaminfo, &sinfosize, NULL, GST_MAP_READ);
/* minus one for the marker that is merged with streaminfo here */
num = g_list_length (flacparse->headers) - 1;
- buf = gst_buffer_new_and_alloc (13 + GST_BUFFER_SIZE (streaminfo));
- GST_BUFFER_DATA (buf)[0] = 0x7f;
- memcpy (GST_BUFFER_DATA (buf) + 1, "FLAC", 4);
- GST_BUFFER_DATA (buf)[5] = 0x01; /* mapping version major */
- GST_BUFFER_DATA (buf)[6] = 0x00; /* mapping version minor */
- GST_BUFFER_DATA (buf)[7] = (num & 0xFF00) >> 8;
- GST_BUFFER_DATA (buf)[8] = (num & 0x00FF) >> 0;
- memcpy (GST_BUFFER_DATA (buf) + 9, "fLaC", 4);
- memcpy (GST_BUFFER_DATA (buf) + 13, GST_BUFFER_DATA (streaminfo),
- GST_BUFFER_SIZE (streaminfo));
+ buf = gst_buffer_new_and_alloc (13 + sinfosize);
+ writedata = gst_buffer_map (buf, &writesize, NULL, GST_MAP_WRITE);
+
+ writedata[0] = 0x7f;
+ memcpy (writedata + 1, "FLAC", 4);
+ writedata[5] = 0x01; /* mapping version major */
+ writedata[6] = 0x00; /* mapping version minor */
+ writedata[7] = (num & 0xFF00) >> 8;
+ writedata[8] = (num & 0x00FF) >> 0;
+ memcpy (writedata + 9, "fLaC", 4);
+ memcpy (writedata + 13, sinfodata, sinfosize);
_value_array_append_buffer (&array, buf);
+
+ gst_buffer_unmap (streaminfo, sinfodata, sinfosize);
+ gst_buffer_unmap (buf, writedata, writesize);
gst_buffer_unref (buf);
}
flacparse->headers =
g_list_delete_link (flacparse->headers, flacparse->headers);
- buf = gst_buffer_make_metadata_writable (buf);
- gst_buffer_set_caps (buf,
- GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (GST_BASE_PARSE (flacparse))));
+ buf = gst_buffer_make_writable (buf);
/* init, set and give away frame */
gst_base_parse_frame_init (&frame);
{
GstBuffer *marker, *streaminfo, *vorbiscomment;
guint8 *data;
+ gsize bufsize;
marker = gst_buffer_new_and_alloc (4);
- memcpy (GST_BUFFER_DATA (marker), "fLaC", 4);
+ data = gst_buffer_map (marker, &bufsize, NULL, GST_MAP_WRITE);
+ memcpy (data, "fLaC", 4);
+ gst_buffer_unmap (marker, data, bufsize);
GST_BUFFER_TIMESTAMP (marker) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (marker) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET (marker) = 0;
flacparse->headers = g_list_append (flacparse->headers, marker);
streaminfo = gst_buffer_new_and_alloc (4 + 34);
- data = GST_BUFFER_DATA (streaminfo);
+ data = gst_buffer_map (streaminfo, &bufsize, NULL, GST_MAP_WRITE);
memset (data, 0, 4 + 34);
/* metadata block header */
}
/* MD5 = 0; */
+ gst_buffer_unmap (streaminfo, data, bufsize);
GST_BUFFER_TIMESTAMP (streaminfo) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (streaminfo) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET (streaminfo) = 0;
sizeof (header), NULL);
gst_tag_list_free (taglist);
+ data = gst_buffer_map (vorbiscomment, &bufsize, NULL, GST_MAP_WRITE);
+
/* Get rid of framing bit */
- if (GST_BUFFER_DATA (vorbiscomment)[GST_BUFFER_SIZE (vorbiscomment) -
- 1] == 1) {
+ if (data[bufsize - 1] == 1) {
GstBuffer *sub;
sub =
- gst_buffer_create_sub (vorbiscomment, 0,
- GST_BUFFER_SIZE (vorbiscomment) - 1);
+ gst_buffer_copy_region (vorbiscomment, GST_BUFFER_COPY_ALL, 0,
+ bufsize - 1);
+ gst_buffer_unmap (vorbiscomment, data, bufsize);
gst_buffer_unref (vorbiscomment);
vorbiscomment = sub;
+ data = gst_buffer_map (vorbiscomment, &bufsize, NULL, GST_MAP_WRITE);
}
- size = GST_BUFFER_SIZE (vorbiscomment) - 4;
- GST_BUFFER_DATA (vorbiscomment)[1] = ((size & 0xFF0000) >> 16);
- GST_BUFFER_DATA (vorbiscomment)[2] = ((size & 0x00FF00) >> 8);
- GST_BUFFER_DATA (vorbiscomment)[3] = (size & 0x0000FF);
+ size = bufsize - 4;
+ data[1] = ((size & 0xFF0000) >> 16);
+ data[2] = ((size & 0x00FF00) >> 8);
+ data[3] = (size & 0x0000FF);
+ gst_buffer_unmap (vorbiscomment, data, bufsize);
GST_BUFFER_TIMESTAMP (vorbiscomment) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (vorbiscomment) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET (vorbiscomment) = 0;
{
GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
GstBuffer *buffer = frame->buffer;
- const guint8 *data = GST_BUFFER_DATA (buffer);
+ guint8 *data = NULL;
+ gsize bufsize;
+ GstFlowReturn res = GST_FLOW_ERROR;
+
+ data = gst_buffer_map (buffer, &bufsize, NULL, GST_MAP_READ);
if (flacparse->state == GST_FLAC_PARSE_STATE_INIT) {
GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
flacparse->headers =
g_list_append (flacparse->headers, gst_buffer_ref (buffer));
- return GST_BASE_PARSE_FLOW_DROPPED;
+ res = GST_BASE_PARSE_FLOW_DROPPED;
} else if (flacparse->state == GST_FLAC_PARSE_STATE_HEADERS) {
gboolean is_last = ((data[0] & 0x80) == 0x80);
guint type = (data[0] & 0x7F);
if (type == 127) {
GST_WARNING_OBJECT (flacparse, "Invalid metadata block type");
- return GST_BASE_PARSE_FLOW_DROPPED;
+ res = GST_BASE_PARSE_FLOW_DROPPED;
+ goto cleanup;
}
GST_DEBUG_OBJECT (flacparse, "Handling metadata block of type %u", type);
switch (type) {
case 0: /* STREAMINFO */
if (!gst_flac_parse_handle_streaminfo (flacparse, buffer))
- return GST_FLOW_ERROR;
+ goto cleanup;
break;
case 3: /* SEEKTABLE */
if (!gst_flac_parse_handle_seektable (flacparse, buffer))
- return GST_FLOW_ERROR;
+ goto cleanup;
break;
case 4: /* VORBIS_COMMENT */
if (!gst_flac_parse_handle_vorbiscomment (flacparse, buffer))
- return GST_FLOW_ERROR;
+ goto cleanup;
break;
case 6: /* PICTURE */
if (!gst_flac_parse_handle_picture (flacparse, buffer))
- return GST_FLOW_ERROR;
+ goto cleanup;
break;
case 1: /* PADDING */
case 2: /* APPLICATION */
if (is_last) {
if (!gst_flac_parse_handle_headers (flacparse))
- return GST_FLOW_ERROR;
+ goto cleanup;
/* Minimal size of a frame header */
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), MAX (9,
}
/* DROPPED because we pushed already or will push all headers manually */
- return GST_BASE_PARSE_FLOW_DROPPED;
+ res = GST_BASE_PARSE_FLOW_DROPPED;
} else {
if (flacparse->offset != GST_BUFFER_OFFSET (buffer)) {
FrameHeaderCheckReturn ret;
flacparse->offset = GST_BUFFER_OFFSET (buffer);
ret =
gst_flac_parse_frame_header_is_valid (flacparse,
- GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), TRUE, NULL);
+ data, bufsize, TRUE, NULL);
if (ret != FRAME_HEADER_VALID) {
GST_ERROR_OBJECT (flacparse,
"Baseclass didn't provide a complete frame");
- return GST_FLOW_ERROR;
+ goto cleanup;
}
}
if (flacparse->block_size == 0) {
GST_ERROR_OBJECT (flacparse, "Unparsed frame");
- return GST_FLOW_ERROR;
+ goto cleanup;
}
if (flacparse->seektable)
"Generating headers for variable blocksize streams not supported");
if (!gst_flac_parse_handle_headers (flacparse))
- return GST_FLOW_ERROR;
+ goto cleanup;
} else {
GST_DEBUG_OBJECT (flacparse, "Generating headers");
if (!gst_flac_parse_generate_headers (flacparse))
- return GST_FLOW_ERROR;
+ goto cleanup;
if (!gst_flac_parse_handle_headers (flacparse))
- return GST_FLOW_ERROR;
+ goto cleanup;
}
flacparse->state = GST_FLAC_PARSE_STATE_DATA;
}
flacparse->blocking_strategy = 0;
flacparse->block_size = 0;
flacparse->sample_number = 0;
- return GST_FLOW_OK;
+ res = GST_FLOW_OK;
}
+
+cleanup:
+ if (data)
+ gst_buffer_unmap (buffer, data, bufsize);
+
+ return res;
}
static GstFlowReturn
GstFormat src_format, gint64 src_value,
GstFormat dest_format, gint64 * dest_value);
-GST_BOILERPLATE (GstMpegAudioParse, gst_mpeg_audio_parse, GstBaseParse,
- GST_TYPE_BASE_PARSE);
+#define gst_mpeg_audio_parse_parent_class parent_class
+G_DEFINE_TYPE (GstMpegAudioParse, gst_mpeg_audio_parse, GST_TYPE_BASE_PARSE);
#define GST_TYPE_MPEG_AUDIO_CHANNEL_MODE \
(gst_mpeg_audio_channel_mode_get_type())
}
static void
-gst_mpeg_audio_parse_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_template));
-
- gst_element_class_set_details_simple (element_class, "MPEG1 Audio Parser",
- "Codec/Parser/Audio",
- "Parses and frames mpeg1 audio streams (levels 1-3), provides seek",
- "Jan Schmidt <thaytan@mad.scientist.com>,"
- "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
-}
-
-static void
gst_mpeg_audio_parse_class_init (GstMpegAudioParseClass * klass)
{
GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GObjectClass *object_class = G_OBJECT_CLASS (klass);
GST_DEBUG_CATEGORY_INIT (mpeg_audio_parse_debug, "mpegaudioparse", 0,
"channel mode", "MPEG audio channel mode", NULL);
g_type_class_ref (GST_TYPE_MPEG_AUDIO_CHANNEL_MODE);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (element_class, "MPEG1 Audio Parser",
+ "Codec/Parser/Audio",
+ "Parses and frames mpeg1 audio streams (levels 1-3), provides seek",
+ "Jan Schmidt <thaytan@mad.scientist.com>,"
+ "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
}
static void
}
static void
-gst_mpeg_audio_parse_init (GstMpegAudioParse * mp3parse,
- GstMpegAudioParseClass * klass)
+gst_mpeg_audio_parse_init (GstMpegAudioParse * mp3parse)
{
gst_mpeg_audio_parse_reset (mp3parse);
}
guint32 header, int bpf, gboolean at_eos, gint * valid)
{
guint32 next_header;
- const guint8 *data;
- guint available;
+ guint8 *data;
+ gsize available;
+ gboolean res = TRUE;
int frames_found = 1;
int offset = bpf;
- available = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
+ data = gst_buffer_map (buf, &available, NULL, GST_MAP_READ);
while (frames_found < MIN_RESYNC_FRAMES) {
/* Check if we have enough data for all these frames, plus the next
if (at_eos) {
/* Running out of data at EOS is fine; just accept it */
*valid = TRUE;
- return TRUE;
+ goto cleanup;
} else {
*valid = offset + 4;
- return FALSE;
+ res = FALSE;
+ goto cleanup;
}
}
(guint) header, (guint) header & HDRMASK, (guint) next_header,
(guint) next_header & HDRMASK, bpf);
*valid = FALSE;
- return TRUE;
+ goto cleanup;
} else if ((((next_header >> 12) & 0xf) == 0) ||
(((next_header >> 12) & 0xf) == 0xf)) {
/* The essential parts were the same, but the bitrate held an
invalid value - also reject */
GST_DEBUG_OBJECT (mp3parse, "next header invalid (bitrate)");
*valid = FALSE;
- return TRUE;
+ goto cleanup;
}
bpf = mp3_type_frame_length_from_header (mp3parse, next_header,
}
*valid = TRUE;
- return TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, data, available);
+ return res;
}
static gboolean
{
GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
GstBuffer *buf = frame->buffer;
- GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buf);
+ GstByteReader reader;
gint off, bpf;
gboolean lost_sync, draining, valid, caps_change;
guint32 header;
guint bitrate, layer, rate, channels, version, mode, crc;
+ guint8 *data;
+ gsize bufsize;
+ gboolean res = FALSE;
- if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < 6))
- return FALSE;
+ data = gst_buffer_map (buf, &bufsize, NULL, GST_MAP_READ);
+ if (G_UNLIKELY (bufsize < 6))
+ goto cleanup;
+
+ gst_byte_reader_init (&reader, data, bufsize);
off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffe00000, 0xffe00000,
- 0, GST_BUFFER_SIZE (buf));
+ 0, bufsize);
GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);
/* didn't find anything that looks like a sync word, skip */
if (off < 0) {
- *skipsize = GST_BUFFER_SIZE (buf) - 3;
- return FALSE;
+ *skipsize = bufsize - 3;
+ goto cleanup;
}
/* possible frame header, but not at offset 0? skip bytes before sync */
if (off > 0) {
*skipsize = off;
- return FALSE;
+ goto cleanup;
}
/* make sure the values in the frame header look sane */
- header = GST_READ_UINT32_BE (GST_BUFFER_DATA (buf));
+ header = GST_READ_UINT32_BE (data);
if (!gst_mpeg_audio_parse_head_check (mp3parse, header)) {
*skipsize = 1;
- return FALSE;
+ goto cleanup;
}
GST_LOG_OBJECT (parse, "got frame");
/* not enough data */
gst_base_parse_set_min_frame_size (parse, valid);
*skipsize = 0;
- return FALSE;
+ goto cleanup;
} else {
if (!valid) {
*skipsize = off + 2;
- return FALSE;
+ goto cleanup;
}
}
} else if (draining && lost_sync && caps_change && mp3parse->rate > 0) {
/* avoid caps jitter that we can't be sure of */
*skipsize = off + 2;
- return FALSE;
+ goto cleanup;
}
/* restore default minimum */
gst_base_parse_set_min_frame_size (parse, MIN_FRAME_SIZE);
*framesize = bpf;
- return TRUE;
+ res = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, data, bufsize);
+ return res;
}
static void
gint64 upstream_total_bytes = 0;
GstFormat fmt = GST_FORMAT_BYTES;
guint32 read_id_xing = 0, read_id_vbri = 0;
- const guint8 *data;
+ guint8 *data, *origdata;
+ gsize bufsize;
guint bitrate;
if (mp3parse->sent_codec_tag)
offset_vbri += 4;
/* Check if we have enough data to read the Xing header */
- avail = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
+ origdata = data = gst_buffer_map (buf, &bufsize, NULL, GST_MAP_READ);
+ avail = bufsize;
if (avail >= offset_xing + 4) {
read_id_xing = GST_READ_UINT32_BE (data + offset_xing);
if (avail < bytes_needed) {
GST_DEBUG_OBJECT (mp3parse,
"Not enough data to read Xing header (need %d)", bytes_needed);
- return;
+ goto cleanup;
}
GST_DEBUG_OBJECT (mp3parse, "Reading Xing header");
if (avail < offset_vbri + 26) {
GST_DEBUG_OBJECT (mp3parse,
"Not enough data to read VBRI header (need %d)", offset_vbri + 26);
- return;
+ goto cleanup;
}
GST_DEBUG_OBJECT (mp3parse, "Reading VBRI header");
if (GST_READ_UINT16_BE (data) != 0x0001) {
GST_WARNING_OBJECT (mp3parse,
"Unsupported VBRI version 0x%x", GST_READ_UINT16_BE (data));
- return;
+ goto cleanup;
}
data += 2;
GST_DEBUG_OBJECT (mp3parse,
"Not enough data to read VBRI header (need %d)",
offset_vbri + 26 + nseek_points * seek_bytes);
- return;
+ goto cleanup;
}
- data = GST_BUFFER_DATA (buf);
+ data = origdata;
data += offset_vbri + 26;
/* VBRI seek table: frame/seek_frames -> byte */
bitrate = 0;
gst_base_parse_set_average_bitrate (GST_BASE_PARSE (mp3parse), bitrate);
+
+cleanup:
+ gst_buffer_unmap (buf, origdata, bufsize);
}
static GstFlowReturn
{
GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
GstBuffer *buf = frame->buffer;
+ guint8 *data;
+ gsize bufsize;
guint bitrate, layer, rate, channels, version, mode, crc;
- g_return_val_if_fail (GST_BUFFER_SIZE (buf) >= 4, GST_FLOW_ERROR);
+ data = gst_buffer_map (buf, &bufsize, NULL, GST_MAP_READ);
+ if (G_UNLIKELY (bufsize < 4))
+ goto short_buffer;
if (!mp3_type_frame_length_from_header (mp3parse,
- GST_READ_UINT32_BE (GST_BUFFER_DATA (buf)),
+ GST_READ_UINT32_BE (data),
&version, &layer, &channels, &bitrate, &rate, &mode, &crc))
goto broken_header;
"layer", G_TYPE_INT, layer,
"rate", G_TYPE_INT, rate,
"channels", G_TYPE_INT, channels, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
- gst_buffer_set_caps (buf, caps);
gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
gst_caps_unref (caps);
mp3parse->last_crc = crc;
mp3parse->last_mode = mode;
+ gst_buffer_unmap (buf, data, bufsize);
return GST_FLOW_OK;
/* ERRORS */
broken_header:
{
/* this really shouldn't ever happen */
+ gst_buffer_unmap (buf, data, bufsize);
GST_ELEMENT_ERROR (parse, STREAM, DECODE, (NULL), (NULL));
return GST_FLOW_ERROR;
}
+
+short_buffer:
+ {
+ gst_buffer_unmap (buf, data, bufsize);
+ return GST_FLOW_ERROR;
+ }
}
static gboolean
GstFormat src_format, gint64 srcval, GstFormat dest_format,
gint64 * destval);
-GST_BOILERPLATE (GstAuParse, gst_au_parse, GstElement, GST_TYPE_ELEMENT);
-
-static void
-gst_au_parse_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_template));
- gst_element_class_set_details_simple (element_class, "AU audio demuxer",
- "Codec/Demuxer/Audio",
- "Parse an .au file into raw audio",
- "Erik Walthinsen <omega@cse.ogi.edu>");
-
- GST_DEBUG_CATEGORY_INIT (auparse_debug, "auparse", 0, ".au parser");
-}
+#define gst_au_parse_parent_class parent_class
+G_DEFINE_TYPE (GstAuParse, gst_au_parse, GST_TYPE_ELEMENT);
static void
gst_au_parse_class_init (GstAuParseClass * klass)
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
+ GST_DEBUG_CATEGORY_INIT (auparse_debug, "auparse", 0, ".au parser");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_au_parse_change_state);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_set_details_simple (gstelement_class,
+ "AU audio demuxer",
+ "Codec/Demuxer/Audio",
+ "Parse an .au file into raw audio",
+ "Erik Walthinsen <omega@cse.ogi.edu>");
}
static void
-gst_au_parse_init (GstAuParse * auparse, GstAuParseClass * klass)
+gst_au_parse_init (GstAuParse * auparse)
{
auparse->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
gst_pad_set_chain_function (auparse->sinkpad,
GstAuParse *auparse = GST_AU_PARSE (element);
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- ret = parent_class->change_state (element, transition);
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
if (ret == GST_STATE_CHANGE_FAILURE)
return ret;
static void gst_auto_audio_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoAudioSink, gst_auto_audio_sink, GstBin, GST_TYPE_BIN);
+#define gst_auto_audio_sink_parent_class parent_class
+G_DEFINE_TYPE (GstAutoAudioSink, gst_auto_audio_sink, GST_TYPE_BIN);
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_audio_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (eklass,
- gst_static_pad_template_get (&sink_template));
-
- gst_element_class_set_details_simple (eklass, "Auto audio sink",
- "Sink/Audio",
- "Wrapper audio sink for automatically detected audio sink",
- "Jan Schmidt <thaytan@noraisin.net>");
-}
-
-static void
gst_auto_audio_sink_class_init (GstAutoAudioSinkClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter sink candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (eklass, "Auto audio sink",
+ "Sink/Audio",
+ "Wrapper audio sink for automatically detected audio sink",
+ "Jan Schmidt <thaytan@noraisin.net>");
}
static void
GST_STATIC_CAPS ("audio/x-raw-int; audio/x-raw-float");
static void
-gst_auto_audio_sink_init (GstAutoAudioSink * sink,
- GstAutoAudioSinkClass * g_class)
+gst_auto_audio_sink_init (GstAutoAudioSink * sink)
{
sink->pad = gst_ghost_pad_new_no_target ("sink", GST_PAD_SINK);
gst_element_add_pad (GST_ELEMENT (sink), sink->pad);
GstElement *element;
gchar *name, *marker;
- marker = g_strdup (GST_PLUGIN_FEATURE (factory)->name);
+ marker = g_strdup (GST_OBJECT_NAME (factory));
if (g_str_has_suffix (marker, "sink"))
marker[strlen (marker) - 4] = '\0';
if (g_str_has_prefix (marker, "gst"))
if ((el = gst_auto_audio_sink_create_element_with_pretty_name (sink, f))) {
GstStateChangeReturn ret;
- GST_DEBUG_OBJECT (sink, "Testing %s", GST_PLUGIN_FEATURE (f)->name);
+ GST_DEBUG_OBJECT (sink, "Testing %s", GST_OBJECT_NAME (f));
/* If autoaudiosink has been provided with filter caps,
* accept only sinks that match with the filter caps */
if (sink->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "sink");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_get_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (sink,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
static void gst_auto_audio_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoAudioSrc, gst_auto_audio_src, GstBin, GST_TYPE_BIN);
+#define gst_auto_audio_src_parent_class parent_class
+G_DEFINE_TYPE (GstAutoAudioSrc, gst_auto_audio_src, GST_TYPE_BIN);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_audio_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (eklass,
- gst_static_pad_template_get (&src_template));
-
- gst_element_class_set_details_simple (eklass, "Auto audio source",
- "Source/Audio",
- "Wrapper audio source for automatically detected audio source",
- "Jan Schmidt <thaytan@noraisin.net>, "
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_auto_audio_src_class_init (GstAutoAudioSrcClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter sink candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (eklass, "Auto audio source",
+ "Source/Audio",
+ "Wrapper audio source for automatically detected audio source",
+ "Jan Schmidt <thaytan@noraisin.net>, "
+ "Stefan Kost <ensonic@users.sf.net>");
}
static void
GST_STATIC_CAPS ("audio/x-raw-int; audio/x-raw-float");
static void
-gst_auto_audio_src_init (GstAutoAudioSrc * src, GstAutoAudioSrcClass * g_class)
+gst_auto_audio_src_init (GstAutoAudioSrc * src)
{
src->pad = gst_ghost_pad_new_no_target ("src", GST_PAD_SRC);
gst_element_add_pad (GST_ELEMENT (src), src->pad);
GstElement *element;
gchar *name, *marker;
- marker = g_strdup (GST_PLUGIN_FEATURE (factory)->name);
+ marker = g_strdup (GST_OBJECT_NAME (factory));
if (g_str_has_suffix (marker, "src"))
marker[strlen (marker) - 4] = '\0';
if (g_str_has_prefix (marker, "gst"))
if ((el = gst_auto_audio_src_create_element_with_pretty_name (src, f))) {
GstStateChangeReturn ret;
- GST_DEBUG_OBJECT (src, "Testing %s", GST_PLUGIN_FEATURE (f)->name);
+ GST_DEBUG_OBJECT (src, "Testing %s", GST_OBJECT_NAME (f));
/* If autoAudioSrc has been provided with filter caps,
* accept only sources that match with the filter caps */
if (src->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "src");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_get_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (src,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
static void gst_auto_video_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoVideoSink, gst_auto_video_sink, GstBin, GST_TYPE_BIN);
+#define gst_auto_video_sink_parent_class parent_class
+G_DEFINE_TYPE (GstAutoVideoSink, gst_auto_video_sink, GST_TYPE_BIN);
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_video_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (eklass,
- gst_static_pad_template_get (&sink_template));
- gst_element_class_set_details_simple (eklass, "Auto video sink",
- "Sink/Video",
- "Wrapper video sink for automatically detected video sink",
- "Jan Schmidt <thaytan@noraisin.net>");
-}
-
-static void
gst_auto_video_sink_class_init (GstAutoVideoSinkClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter sink candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_set_details_simple (eklass, "Auto video sink",
+ "Sink/Video",
+ "Wrapper video sink for automatically detected video sink",
+ "Jan Schmidt <thaytan@noraisin.net>");
}
static void
gst_object_unref (targetpad);
}
-static GstStaticCaps raw_caps =
- GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb");
+static GstStaticCaps raw_caps = GST_STATIC_CAPS ("video/x-raw");
static void
-gst_auto_video_sink_init (GstAutoVideoSink * sink,
- GstAutoVideoSinkClass * g_class)
+gst_auto_video_sink_init (GstAutoVideoSink * sink)
{
sink->pad = gst_ghost_pad_new_no_target ("sink", GST_PAD_SINK);
gst_element_add_pad (GST_ELEMENT (sink), sink->pad);
GstElement *element;
gchar *name, *marker;
- marker = g_strdup (GST_PLUGIN_FEATURE (factory)->name);
+ marker = g_strdup (GST_OBJECT_NAME (factory));
if (g_str_has_suffix (marker, "sink"))
marker[strlen (marker) - 4] = '\0';
if (g_str_has_prefix (marker, "gst"))
if ((el = gst_auto_video_sink_create_element_with_pretty_name (sink, f))) {
GstStateChangeReturn ret;
- GST_DEBUG_OBJECT (sink, "Testing %s", GST_PLUGIN_FEATURE (f)->name);
+ GST_DEBUG_OBJECT (sink, "Testing %s", GST_OBJECT_NAME (f));
/* If autovideosink has been provided with filter caps,
* accept only sinks that match with the filter caps */
if (sink->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "sink");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_get_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (sink,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
static void gst_auto_video_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoVideoSrc, gst_auto_video_src, GstBin, GST_TYPE_BIN);
+#define gst_auto_video_src_parent_class parent_class
+G_DEFINE_TYPE (GstAutoVideoSrc, gst_auto_video_src, GST_TYPE_BIN);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_video_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (eklass,
- gst_static_pad_template_get (&src_template));
- gst_element_class_set_details_simple (eklass, "Auto video source",
- "Source/Video",
- "Wrapper video source for automatically detected video source",
- "Jan Schmidt <thaytan@noraisin.net>, "
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_auto_video_src_class_init (GstAutoVideoSrcClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter src candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_set_details_simple (eklass, "Auto video source",
+ "Source/Video",
+ "Wrapper video source for automatically detected video source",
+ "Jan Schmidt <thaytan@noraisin.net>, "
+ "Stefan Kost <ensonic@users.sf.net>");
}
static void
gst_object_unref (targetpad);
}
-static GstStaticCaps raw_caps =
- GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb");
+static GstStaticCaps raw_caps = GST_STATIC_CAPS ("video/x-raw");
static void
-gst_auto_video_src_init (GstAutoVideoSrc * src, GstAutoVideoSrcClass * g_class)
+gst_auto_video_src_init (GstAutoVideoSrc * src)
{
src->pad = gst_ghost_pad_new_no_target ("src", GST_PAD_SRC);
gst_element_add_pad (GST_ELEMENT (src), src->pad);
GstElement *element;
gchar *name, *marker;
- marker = g_strdup (GST_PLUGIN_FEATURE (factory)->name);
+ marker = g_strdup (GST_OBJECT_NAME (factory));
if (g_str_has_suffix (marker, "src"))
marker[strlen (marker) - 4] = '\0';
if (g_str_has_prefix (marker, "gst"))
if ((el = gst_auto_video_src_create_element_with_pretty_name (src, f))) {
GstStateChangeReturn ret;
- GST_DEBUG_OBJECT (src, "Testing %s", GST_PLUGIN_FEATURE (f)->name);
+ GST_DEBUG_OBJECT (src, "Testing %s", GST_OBJECT_NAME (f));
/* If AutoVideoSrc has been provided with filter caps,
* accept only sources that match with the filter caps */
if (src->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "src");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_get_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (src,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
$(GST_LIBS) \
-lgstriff-@GST_MAJORMINOR@ \
-lgstaudio-@GST_MAJORMINOR@ \
- -lgsttag-@GST_MAJORMINOR@
+ -lgsttag-@GST_MAJORMINOR@ \
+ -lgstvideo-@GST_MAJORMINOR@
libgstavi_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstavi_la_LIBTOOLFLAGS = --tag=disable-static
GST_STATIC_CAPS ("video/x-msvideo")
);
-static void gst_avi_demux_base_init (GstAviDemuxClass * klass);
-static void gst_avi_demux_class_init (GstAviDemuxClass * klass);
-static void gst_avi_demux_init (GstAviDemux * avi);
static void gst_avi_demux_finalize (GObject * object);
static void gst_avi_demux_reset (GstAviDemux * avi);
static void gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf);
-static GstElementClass *parent_class = NULL;
-
/* GObject methods */
-GType
-gst_avi_demux_get_type (void)
-{
- static GType avi_demux_type = 0;
-
- if (!avi_demux_type) {
- static const GTypeInfo avi_demux_info = {
- sizeof (GstAviDemuxClass),
- (GBaseInitFunc) gst_avi_demux_base_init,
- NULL,
- (GClassInitFunc) gst_avi_demux_class_init,
- NULL,
- NULL,
- sizeof (GstAviDemux),
- 0,
- (GInstanceInitFunc) gst_avi_demux_init,
- };
-
- avi_demux_type =
- g_type_register_static (GST_TYPE_ELEMENT,
- "GstAviDemux", &avi_demux_info, 0);
- }
-
- return avi_demux_type;
-}
+#define gst_avi_demux_parent_class parent_class
+G_DEFINE_TYPE (GstAviDemux, gst_avi_demux, GST_TYPE_ELEMENT);
static void
-gst_avi_demux_base_init (GstAviDemuxClass * klass)
+gst_avi_demux_class_init (GstAviDemuxClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *gobject_class = (GObjectClass *) klass;
GstPadTemplate *videosrctempl, *audiosrctempl, *subsrctempl;
GstCaps *audcaps, *vidcaps, *subcaps;
+ GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux",
+ 0, "Demuxer for AVI streams");
+
+ gobject_class->finalize = gst_avi_demux_finalize;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_avi_demux_change_state);
+ gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_avi_demux_set_index);
+ gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_avi_demux_get_index);
+
audcaps = gst_riff_create_audio_template_caps ();
gst_caps_append (audcaps, gst_caps_new_simple ("audio/x-avi-unknown", NULL));
audiosrctempl = gst_pad_template_new ("audio_%02d",
subcaps = gst_caps_new_simple ("application/x-subtitle-avi", NULL);
subsrctempl = gst_pad_template_new ("subtitle_%02d",
GST_PAD_SRC, GST_PAD_SOMETIMES, subcaps);
- gst_element_class_add_pad_template (element_class, audiosrctempl);
- gst_element_class_add_pad_template (element_class, videosrctempl);
- gst_element_class_add_pad_template (element_class, subsrctempl);
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class, audiosrctempl);
+ gst_element_class_add_pad_template (gstelement_class, videosrctempl);
+ gst_element_class_add_pad_template (gstelement_class, subsrctempl);
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&sink_templ));
- gst_element_class_set_details_simple (element_class, "Avi demuxer",
+
+ gst_element_class_set_details_simple (gstelement_class, "Avi demuxer",
"Codec/Demuxer",
"Demultiplex an avi file into audio and video",
"Erik Walthinsen <omega@cse.ogi.edu>, "
}
static void
-gst_avi_demux_class_init (GstAviDemuxClass * klass)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
- GObjectClass *gobject_class = (GObjectClass *) klass;
-
- GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux",
- 0, "Demuxer for AVI streams");
-
- parent_class = g_type_class_peek_parent (klass);
-
- gobject_class->finalize = gst_avi_demux_finalize;
- gstelement_class->change_state =
- GST_DEBUG_FUNCPTR (gst_avi_demux_change_state);
-
- gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_avi_demux_set_index);
- gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_avi_demux_get_index);
-}
-
-static void
gst_avi_demux_init (GstAviDemux * avi)
{
avi->sinkpad = gst_pad_new_from_static_template (&sink_templ, "sink");
gst_object_unref (avi->element_index);
avi->element_index = NULL;
- if (avi->close_seg_event) {
- gst_event_unref (avi->close_seg_event);
- avi->close_seg_event = NULL;
- }
if (avi->seg_event) {
gst_event_unref (avi->seg_event);
avi->seg_event = NULL;
"have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event);
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time, offset = 0;
- gboolean update;
+ gint64 boffset, offset = 0;
GstSegment segment;
/* some debug output */
- gst_segment_init (&segment, GST_FORMAT_UNDEFINED);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
- gst_segment_set_newsegment_full (&segment, update, rate, arate, format,
- start, stop, time);
- GST_DEBUG_OBJECT (avi,
- "received format %d newsegment %" GST_SEGMENT_FORMAT, format,
+ gst_event_copy_segment (event, &segment);
+ GST_DEBUG_OBJECT (avi, "received newsegment %" GST_SEGMENT_FORMAT,
&segment);
/* chain will send initial newsegment after pads have been added */
}
/* we only expect a BYTE segment, e.g. following a seek */
- if (format != GST_FORMAT_BYTES) {
+ if (segment.format != GST_FORMAT_BYTES) {
GST_DEBUG_OBJECT (avi, "unsupported segment format, ignoring");
goto exit;
}
GstAviStream *stream;
/* compensate chunk header, stored index offset points after header */
- start += 8;
+ boffset = segment.start + 8;
/* find which stream we're on */
do {
stream = &avi->stream[i];
entry = gst_util_array_binary_search (stream->index,
stream->idx_n, sizeof (GstAviIndexEntry),
(GCompareDataFunc) gst_avi_demux_index_entry_offset_search,
- GST_SEARCH_MODE_AFTER, &start, NULL);
+ GST_SEARCH_MODE_AFTER, &boffset, NULL);
if (entry == NULL)
continue;
k = i;
}
/* exact match needs no further searching */
- if (stream->index[index].offset == start)
+ if (stream->index[index].offset == segment.start)
break;
} while (++i < avi->num_streams);
- start -= 8;
+ boffset -= 8;
offset -= 8;
stream = &avi->stream[k];
/* get the ts corresponding to start offset bytes for the stream */
gst_avi_demux_get_buffer_info (avi, stream, index,
- (GstClockTime *) & time, NULL, NULL, NULL);
+ (GstClockTime *) & segment.time, NULL, NULL, NULL);
} else if (avi->element_index) {
GstIndexEntry *entry;
/* Let's check if we have an index entry for this position */
entry = gst_index_get_assoc_entry (avi->element_index, avi->index_id,
GST_INDEX_LOOKUP_AFTER, GST_ASSOCIATION_FLAG_NONE,
- GST_FORMAT_BYTES, start);
+ GST_FORMAT_BYTES, segment.start);
/* we can not go where we have not yet been before ... */
if (!entry) {
goto eos;
}
- gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME,
+ (gint64 *) & segment.time);
gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &offset);
} else {
GST_WARNING_OBJECT (avi, "no index data, forcing EOS");
goto eos;
}
- stop = GST_CLOCK_TIME_NONE;
+ segment.format = GST_FORMAT_TIME;
+ segment.start = segment.time;
+ segment.stop = GST_CLOCK_TIME_NONE;
+ segment.position = segment.start;
+
+ /* rescue duration */
+ segment.duration = avi->segment.duration;
/* set up segment and send downstream */
- gst_segment_set_newsegment_full (&avi->segment, update, rate, arate,
- GST_FORMAT_TIME, time, stop, time);
- GST_DEBUG_OBJECT (avi, "Pushing newseg update %d, rate %g, "
- "applied rate %g, format %d, start %" G_GINT64_FORMAT ", "
- "stop %" G_GINT64_FORMAT, update, rate, arate, GST_FORMAT_TIME,
- time, stop);
- gst_avi_demux_push_event (avi,
- gst_event_new_new_segment_full (update, rate, arate, GST_FORMAT_TIME,
- time, stop, time));
-
- GST_DEBUG_OBJECT (avi, "next chunk expected at %" G_GINT64_FORMAT, start);
+ gst_segment_copy_into (&segment, &avi->segment);
+
+ GST_DEBUG_OBJECT (avi, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);
+ gst_avi_demux_push_event (avi, gst_event_new_segment (&segment));
+
+ GST_DEBUG_OBJECT (avi, "next chunk expected at %" G_GINT64_FORMAT,
+ boffset);
/* adjust state for streaming thread accordingly */
if (avi->have_index)
gst_avi_demux_seek_streams (avi, offset, FALSE);
/* set up streaming thread */
- g_assert (offset >= start);
- avi->offset = start;
- avi->todrop = offset - start;
+ g_assert (offset >= boffset);
+ avi->offset = boffset;
+ avi->todrop = offset - boffset;
exit:
gst_event_unref (event);
if (gst_adapter_available (avi->adapter) < 8)
return FALSE;
- data = gst_adapter_peek (avi->adapter, 8);
+ data = gst_adapter_map (avi->adapter, 8);
*tag = GST_READ_UINT32_LE (data);
*size = GST_READ_UINT32_LE (data + 4);
+ gst_adapter_unmap (avi->adapter, 0);
return TRUE;
}
GstBuffer * buf, gst_riff_avih ** _avih)
{
gst_riff_avih *avih;
+ gsize size;
if (buf == NULL)
goto no_buffer;
- if (GST_BUFFER_SIZE (buf) < sizeof (gst_riff_avih))
+ size = gst_buffer_get_size (buf);
+ if (size < sizeof (gst_riff_avih))
goto avih_too_small;
- avih = g_memdup (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ avih = g_malloc (size);
+ gst_buffer_extract (buf, 0, avih, size);
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
avih->us_frame = GUINT32_FROM_LE (avih->us_frame);
{
GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
("Too small avih (%d available, %d needed)",
- GST_BUFFER_SIZE (buf), (int) sizeof (gst_riff_avih)));
+          (int) size, (int) sizeof (gst_riff_avih)));
gst_buffer_unref (buf);
return FALSE;
}
guint16 bpe = 16;
guint32 num, i;
guint64 *indexes;
- guint size;
+ gsize size;
*_indexes = NULL;
- size = buf ? GST_BUFFER_SIZE (buf) : 0;
+ if (buf)
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ else
+ size = 0;
+
if (size < 24)
goto too_small;
- data = GST_BUFFER_DATA (buf);
-
/* check type of index. The opendml2 specs state that
* there should be 4 dwords per array entry. Type can be
* either frame or field (and we don't care). */
indexes[i] = GST_BUFFER_OFFSET_NONE;
*_indexes = indexes;
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return TRUE;
{
GST_ERROR_OBJECT (avi,
"Not enough data to parse superindex (%d available, 24 needed)", size);
- if (buf)
+ if (buf) {
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
+ }
return FALSE;
}
invalid_params:
{
GST_ERROR_OBJECT (avi, "invalid index parameters (num = %d, bpe = %d)",
num, bpe);
- if (buf)
- gst_buffer_unref (buf);
+ gst_buffer_unmap (buf, data, size);
+ gst_buffer_unref (buf);
return FALSE;
}
}
guint16 bpe;
guint32 num, i;
guint64 baseoff;
- guint size;
+ gsize size;
- if (!buf)
+ if (buf == NULL)
return TRUE;
- size = GST_BUFFER_SIZE (buf);
-
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/* check size */
if (size < 24)
goto too_small;
- data = GST_BUFFER_DATA (buf);
-
/* We don't support index-data yet */
if (data[3] & 0x80)
goto not_implemented;
if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
goto out_of_mem;
}
+done:
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return TRUE;
{
GST_ERROR_OBJECT (avi,
"Not enough data to parse subindex (%d available, 24 needed)", size);
- gst_buffer_unref (buf);
- return TRUE; /* continue */
+ goto done; /* continue */
}
not_implemented:
{
GST_ELEMENT_ERROR (avi, STREAM, NOT_IMPLEMENTED, (NULL),
("Subindex-is-data is not implemented"));
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return FALSE;
}
empty_index:
{
GST_DEBUG_OBJECT (avi, "the index is empty");
- gst_buffer_unref (buf);
- return TRUE;
+ goto done; /* continue */
}
out_of_mem:
{
("Cannot allocate memory for %u*%u=%u bytes",
(guint) sizeof (GstAviIndexEntry), num,
(guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return FALSE;
}
{
gst_riff_vprp *vprp;
gint k;
+ gsize size;
g_return_val_if_fail (buf != NULL, FALSE);
g_return_val_if_fail (_vprp != NULL, FALSE);
- if (GST_BUFFER_SIZE (buf) < G_STRUCT_OFFSET (gst_riff_vprp, field_info))
+ size = gst_buffer_get_size (buf);
+
+ if (size < G_STRUCT_OFFSET (gst_riff_vprp, field_info))
goto too_small;
- vprp = g_memdup (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ vprp = g_malloc (size);
+ gst_buffer_extract (buf, 0, vprp, size);
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
vprp->format_token = GUINT32_FROM_LE (vprp->format_token);
/* size checking */
/* calculate fields based on size */
- k = (GST_BUFFER_SIZE (buf) - G_STRUCT_OFFSET (gst_riff_vprp, field_info)) /
- vprp->fields;
+ k = (size - G_STRUCT_OFFSET (gst_riff_vprp, field_info)) / vprp->fields;
if (vprp->fields > k) {
GST_WARNING_OBJECT (element,
"vprp header indicated %d fields, only %d available", vprp->fields, k);
{
GST_ERROR_OBJECT (element,
"Too small vprp (%d available, at least %d needed)",
- GST_BUFFER_SIZE (buf),
- (int) G_STRUCT_OFFSET (gst_riff_vprp, field_info));
+          (int) size, (int) G_STRUCT_OFFSET (gst_riff_vprp, field_info));
gst_buffer_unref (buf);
return FALSE;
}
GstAviStream *stream = &avi->stream[i];
if (force || stream->idx_n != 0) {
- GST_LOG_OBJECT (avi, "Added pad %s with caps %" GST_PTR_FORMAT,
- GST_PAD_NAME (stream->pad), GST_PAD_CAPS (stream->pad));
+      GST_LOG_OBJECT (avi, "Adding pad %s", GST_PAD_NAME (stream->pad));
gst_element_add_pad ((GstElement *) avi, stream->pad);
if (avi->element_index)
static inline void
gst_avi_demux_roundup_list (GstAviDemux * avi, GstBuffer ** buf)
{
- gint size = GST_BUFFER_SIZE (*buf);
+ gsize size;
+
+ size = gst_buffer_get_size (*buf);
if (G_UNLIKELY (size & 1)) {
GstBuffer *obuf;
+ guint8 *data;
GST_DEBUG_OBJECT (avi, "rounding up dubious list size %d", size);
obuf = gst_buffer_new_and_alloc (size + 1);
- memcpy (GST_BUFFER_DATA (obuf), GST_BUFFER_DATA (*buf), size);
+
+ data = gst_buffer_map (obuf, NULL, NULL, GST_MAP_WRITE);
+ gst_buffer_extract (*buf, 0, data, size);
/* assume 0 padding, at least makes outcome deterministic */
- (GST_BUFFER_DATA (obuf))[size] = 0;
+ data[size] = 0;
+ gst_buffer_unmap (obuf, data, size + 1);
gst_buffer_replace (buf, obuf);
}
}
case GST_RIFF_TAG_strn:
g_free (stream->name);
if (sub != NULL) {
- stream->name =
- g_strndup ((gchar *) GST_BUFFER_DATA (sub),
- (gsize) GST_BUFFER_SIZE (sub));
+ gchar *bdata;
+ gsize bsize;
+
+ bdata = gst_buffer_map (sub, &bsize, NULL, GST_MAP_READ);
+ stream->name = g_strndup (bdata, bsize);
+ gst_buffer_unmap (sub, bdata, bsize);
gst_buffer_unref (sub);
sub = NULL;
} else {
gst_pad_set_element_private (pad, stream);
avi->num_streams++;
- gst_pad_set_caps (pad, caps);
gst_pad_set_active (pad, TRUE);
+ gst_pad_push_event (pad, gst_event_new_caps (caps));
gst_caps_unref (caps);
/* make tags */
switch (tag) {
case GST_RIFF_TAG_dmlh:{
gst_riff_dmlh dmlh, *_dmlh;
- guint size;
+ gsize size;
+ guint8 *data;
/* sub == NULL is possible and means an empty buffer */
- size = sub ? GST_BUFFER_SIZE (sub) : 0;
+ if (sub == NULL)
+ goto next;
+
+ data = gst_buffer_map (sub, &size, NULL, GST_MAP_READ);
/* check size */
if (size < sizeof (gst_riff_dmlh)) {
GST_ERROR_OBJECT (avi,
"DMLH entry is too small (%d bytes, %d needed)",
size, (int) sizeof (gst_riff_dmlh));
+ gst_buffer_unmap (sub, data, size);
goto next;
}
- _dmlh = (gst_riff_dmlh *) GST_BUFFER_DATA (sub);
+ _dmlh = (gst_riff_dmlh *) data;
dmlh.totalframes = GST_READ_UINT32_LE (&_dmlh->totalframes);
+ gst_buffer_unmap (sub, data, size);
GST_INFO_OBJECT (avi, "dmlh tag found: totalframes: %u",
dmlh.totalframes);
gst_avi_demux_parse_index (GstAviDemux * avi, GstBuffer * buf)
{
guint8 *data;
- guint size;
+ gsize size;
guint i, num, n;
gst_riff_index_entry *index;
GstClockTime stamp;
if (!buf)
return FALSE;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
stamp = gst_util_get_timestamp ();
n++;
}
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
/* get stream stats now */
empty_list:
{
GST_DEBUG_OBJECT (avi, "empty index");
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return FALSE;
}
("Cannot allocate memory for %u*%u=%u bytes",
(guint) sizeof (GstAviIndexEntry), num,
(guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return FALSE;
}
GstBuffer *buf;
guint32 tag;
guint32 size;
+ gsize bsize;
+ guint8 *bdata;
GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset);
res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
if (res != GST_FLOW_OK)
goto pull_failed;
- else if (GST_BUFFER_SIZE (buf) < 8)
+
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+ if (bsize < 8)
goto too_small;
/* check tag first before blindy trying to read 'size' bytes */
- tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
- size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 4);
+ tag = GST_READ_UINT32_LE (bdata);
+ size = GST_READ_UINT32_LE (bdata + 4);
if (tag == GST_RIFF_TAG_LIST) {
/* this is the movi tag */
GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT,
(8 + GST_ROUND_UP_2 (size)));
offset += 8 + GST_ROUND_UP_2 (size);
+ gst_buffer_unmap (buf, bdata, bsize);
gst_buffer_unref (buf);
+
res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
if (res != GST_FLOW_OK)
goto pull_failed;
- else if (GST_BUFFER_SIZE (buf) < 8)
+
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+ if (bsize < 8)
goto too_small;
- tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
- size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 4);
+
+ tag = GST_READ_UINT32_LE (bdata);
+ size = GST_READ_UINT32_LE (bdata + 4);
}
+ gst_buffer_unmap (buf, bdata, bsize);
+ gst_buffer_unref (buf);
if (tag != GST_RIFF_TAG_idx1)
goto no_index;
if (!size)
goto zero_index;
- gst_buffer_unref (buf);
-
GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset);
/* read chunk, advance offset */
return;
GST_DEBUG ("will parse index chunk size %u for tag %"
- GST_FOURCC_FORMAT, GST_BUFFER_SIZE (buf), GST_FOURCC_ARGS (tag));
+      GST_FOURCC_FORMAT, (guint) gst_buffer_get_size (buf),
+      GST_FOURCC_ARGS (tag));
gst_avi_demux_parse_index (avi, buf);
too_small:
{
GST_DEBUG_OBJECT (avi, "Buffer is too small");
+ gst_buffer_unmap (buf, bdata, bsize);
gst_buffer_unref (buf);
return;
}
GST_WARNING_OBJECT (avi,
"No index data (idx1) after movi chunk, but %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (tag));
- gst_buffer_unref (buf);
return;
}
zero_index:
{
GST_WARNING_OBJECT (avi, "Empty index data (idx1) after movi chunk");
- gst_buffer_unref (buf);
return;
}
}
offset += 8 + GST_ROUND_UP_2 (size);
GST_DEBUG ("will parse index chunk size %u for tag %"
- GST_FOURCC_FORMAT, GST_BUFFER_SIZE (buf), GST_FOURCC_ARGS (tag));
+      GST_FOURCC_FORMAT, (guint) gst_buffer_get_size (buf),
+      GST_FOURCC_ARGS (tag));
avi->offset = avi->first_movi_offset;
gst_avi_demux_parse_index (avi, buf);
{
GstFlowReturn res = GST_FLOW_OK;
GstBuffer *buf = NULL;
- guint bufsize;
+ gsize bufsize;
guint8 *bufdata;
res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
if (res != GST_FLOW_OK)
goto pull_failed;
- bufsize = GST_BUFFER_SIZE (buf);
+ bufdata = gst_buffer_map (buf, &bufsize, NULL, GST_MAP_READ);
if (bufsize != 8)
goto wrong_size;
- bufdata = GST_BUFFER_DATA (buf);
-
*tag = GST_READ_UINT32_LE (bufdata);
*size = GST_READ_UINT32_LE (bufdata + 4);
*size, offset + 8, offset + 8 + (gint64) * size);
done:
+ gst_buffer_unmap (buf, bufdata, bufsize);
gst_buffer_unref (buf);
return res;
GST_INFO ("Setting total duration to: %" GST_TIME_FORMAT,
GST_TIME_ARGS (total));
- gst_segment_set_duration (&avi->segment, GST_FORMAT_TIME, total);
+ avi->segment.duration = total;
}
/* returns FALSE if there are no pads to deliver event to,
const guint8 *data;
GstBuffer *buf = NULL, *sub = NULL;
guint offset = 4;
- gint64 stop;
gint i;
GstTagList *tags = NULL;
+ guint8 fourcc[4];
GST_DEBUG ("Reading and parsing avi headers: %d", avi->header_state);
GST_DEBUG ("Reading %d bytes", size);
buf = gst_adapter_take_buffer (avi->adapter, size);
- if (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf)) != GST_RIFF_LIST_hdrl)
+ gst_buffer_extract (buf, 0, fourcc, 4);
+
+ if (GST_READ_UINT32_LE (fourcc) != GST_RIFF_LIST_hdrl)
goto header_no_hdrl;
/* mind padding */
switch (tag) {
case GST_RIFF_TAG_LIST:
- if (GST_BUFFER_SIZE (sub) < 4)
+ if (gst_buffer_get_size (sub) < 4)
goto next;
- switch (GST_READ_UINT32_LE (GST_BUFFER_DATA (sub))) {
+ gst_buffer_extract (sub, 0, fourcc, 4);
+
+ switch (GST_READ_UINT32_LE (fourcc)) {
case GST_RIFF_LIST_strl:
if (!(gst_avi_demux_parse_stream (avi, sub))) {
sub = NULL;
default:
GST_WARNING_OBJECT (avi,
"Unknown list %" GST_FOURCC_FORMAT " in AVI header",
- GST_FOURCC_ARGS (GST_READ_UINT32_LE (GST_BUFFER_DATA
- (sub))));
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (fourcc)));
/* fall-through */
case GST_RIFF_TAG_JUNQ:
case GST_RIFF_TAG_JUNK:
goto next;
+ break;
}
- break;
case GST_RIFF_IDIT:
gst_avi_demux_parse_idit (avi, sub);
goto next;
if (gst_adapter_available (avi->adapter) < 12)
return GST_FLOW_OK;
- data = gst_adapter_peek (avi->adapter, 12);
+ data = gst_adapter_map (avi->adapter, 12);
tag = GST_READ_UINT32_LE (data);
size = GST_READ_UINT32_LE (data + 4);
ltag = GST_READ_UINT32_LE (data + 8);
+ gst_adapter_unmap (avi->adapter, 0);
if (tag == GST_RIFF_TAG_LIST) {
switch (ltag) {
avi->stream[i].current_entry = 0;
/* create initial NEWSEGMENT event */
- if ((stop = avi->segment.stop) == GST_CLOCK_TIME_NONE)
- stop = avi->segment.duration;
-
- GST_DEBUG_OBJECT (avi, "segment stop %" G_GINT64_FORMAT, stop);
-
if (avi->seg_event)
gst_event_unref (avi->seg_event);
- avi->seg_event = gst_event_new_new_segment_full
- (FALSE, avi->segment.rate, avi->segment.applied_rate, GST_FORMAT_TIME,
- avi->segment.start, stop, avi->segment.time);
+ avi->seg_event = gst_event_new_segment (&avi->segment);
gst_avi_demux_check_seekability (avi);
static void
gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf)
{
- gchar *data = (gchar *) GST_BUFFER_DATA (buf);
- guint size = GST_BUFFER_SIZE (buf);
+ gchar *data, *ptr;
+ gsize size, left;
gchar *safedata = NULL;
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/*
* According to:
* http://www.eden-foundation.org/products/code/film_date_stamp/index.html
*/
/* skip eventual initial whitespace */
- while (size > 0 && g_ascii_isspace (data[0])) {
- data++;
- size--;
+ ptr = data;
+ left = size;
+
+ while (left > 0 && g_ascii_isspace (ptr[0])) {
+ ptr++;
+ left--;
}
- if (size == 0) {
+ if (left == 0) {
goto non_parsable;
}
/* make a safe copy to add a \0 to the end of the string */
- safedata = g_strndup (data, size);
+ safedata = g_strndup (ptr, left);
/* test if the first char is a alpha or a number */
- if (g_ascii_isdigit (data[0])) {
+  if (g_ascii_isdigit (ptr[0])) {
    gst_avi_demux_parse_idit_nums_only (avi, safedata);
    g_free (safedata);
+    gst_buffer_unmap (buf, data, size);
    return;
-  } else if (g_ascii_isalpha (data[0])) {
+  } else if (g_ascii_isalpha (ptr[0])) {
    gst_avi_demux_parse_idit_text (avi, safedata);
    g_free (safedata);
+    gst_buffer_unmap (buf, data, size);
    return;
non_parsable:
GST_WARNING_OBJECT (avi, "IDIT tag has no parsable info");
+ gst_buffer_unmap (buf, data, size);
}
/*
GstBuffer *buf, *sub = NULL;
guint32 tag;
guint offset = 4;
- gint64 stop;
GstElement *element = GST_ELEMENT_CAST (avi);
GstClockTime stamp;
GstTagList *tags = NULL;
+ guint8 fourcc[4];
stamp = gst_util_get_timestamp ();
goto pull_range_failed;
else if (tag != GST_RIFF_TAG_LIST)
goto no_list;
- else if (GST_BUFFER_SIZE (buf) < 4)
+ else if (gst_buffer_get_size (buf) < 4)
goto no_header;
GST_DEBUG_OBJECT (avi, "parsing headers");
/* Find the 'hdrl' LIST tag */
- while (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf)) != GST_RIFF_LIST_hdrl) {
+ gst_buffer_extract (buf, 0, fourcc, 4);
+ while (GST_READ_UINT32_LE (fourcc) != GST_RIFF_LIST_hdrl) {
GST_LOG_OBJECT (avi, "buffer contains %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf))));
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (fourcc)));
/* Eat up */
gst_buffer_unref (buf);
goto pull_range_failed;
else if (tag != GST_RIFF_TAG_LIST)
goto no_list;
- else if (GST_BUFFER_SIZE (buf) < 4)
+ else if (gst_buffer_get_size (buf) < 4)
goto no_header;
+ gst_buffer_extract (buf, 0, fourcc, 4);
}
GST_DEBUG_OBJECT (avi, "hdrl LIST tag found");
/* now, read the elements from the header until the end */
while (gst_riff_parse_chunk (element, buf, &offset, &tag, &sub)) {
+ gsize size;
+ guint8 *data;
+
/* sub can be NULL on empty tags */
if (!sub)
continue;
+ data = gst_buffer_map (sub, &size, NULL, GST_MAP_READ);
+
switch (tag) {
case GST_RIFF_TAG_LIST:
- {
- guint8 *data;
- guint32 fourcc;
-
- if (GST_BUFFER_SIZE (sub) < 4)
+ if (size < 4)
goto next;
- data = GST_BUFFER_DATA (sub);
- fourcc = GST_READ_UINT32_LE (data);
-
- switch (fourcc) {
+ switch (GST_READ_UINT32_LE (data)) {
case GST_RIFF_LIST_strl:
if (!(gst_avi_demux_parse_stream (avi, sub))) {
GST_ELEMENT_WARNING (avi, STREAM, DEMUX, (NULL),
sub = NULL;
break;
case GST_RIFF_LIST_INFO:
- GST_BUFFER_DATA (sub) = data + 4;
- GST_BUFFER_SIZE (sub) -= 4;
+ gst_buffer_resize (sub, 4, -1);
gst_riff_parse_info (element, sub, &tags);
if (tags) {
if (avi->globaltags) {
default:
GST_WARNING_OBJECT (avi,
"Unknown list %" GST_FOURCC_FORMAT " in AVI header",
- GST_FOURCC_ARGS (fourcc));
- GST_MEMDUMP_OBJECT (avi, "Unknown list", GST_BUFFER_DATA (sub),
- GST_BUFFER_SIZE (sub));
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (data)));
+ GST_MEMDUMP_OBJECT (avi, "Unknown list", data, size);
/* fall-through */
case GST_RIFF_TAG_JUNQ:
case GST_RIFF_TAG_JUNK:
goto next;
}
break;
- }
case GST_RIFF_IDIT:
gst_avi_demux_parse_idit (avi, sub);
goto next;
GST_WARNING_OBJECT (avi,
"Unknown tag %" GST_FOURCC_FORMAT " in AVI header at off %d",
GST_FOURCC_ARGS (tag), offset);
- GST_MEMDUMP_OBJECT (avi, "Unknown tag", GST_BUFFER_DATA (sub),
- GST_BUFFER_SIZE (sub));
+ GST_MEMDUMP_OBJECT (avi, "Unknown tag", data, size);
/* fall-through */
case GST_RIFF_TAG_JUNQ:
case GST_RIFF_TAG_JUNK:
next:
- if (sub)
+ if (sub) {
+ gst_buffer_unmap (sub, data, size);
gst_buffer_unref (sub);
+ }
sub = NULL;
break;
}
/* Now, find the data (i.e. skip all junk between header and data) */
do {
guint size;
+ gsize bsize;
guint8 *data;
guint32 tag, ltag;
if (res != GST_FLOW_OK) {
GST_DEBUG_OBJECT (avi, "pull_range failure while looking for tags");
goto pull_range_failed;
- } else if (GST_BUFFER_SIZE (buf) < 12) {
+ } else if (gst_buffer_get_size (buf) < 12) {
GST_DEBUG_OBJECT (avi, "got %d bytes which is less than 12 bytes",
- GST_BUFFER_SIZE (buf));
+          (int) gst_buffer_get_size (buf));
gst_buffer_unref (buf);
return GST_FLOW_ERROR;
}
- data = GST_BUFFER_DATA (buf);
-
+ data = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
tag = GST_READ_UINT32_LE (data);
size = GST_READ_UINT32_LE (data + 4);
ltag = GST_READ_UINT32_LE (data + 8);
GST_DEBUG ("tag %" GST_FOURCC_FORMAT ", size %u",
GST_FOURCC_ARGS (tag), size);
- GST_MEMDUMP ("Tag content", data, GST_BUFFER_SIZE (buf));
+ GST_MEMDUMP ("Tag content", data, bsize);
+ gst_buffer_unmap (buf, data, bsize);
gst_buffer_unref (buf);
switch (tag) {
GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
goto pull_range_failed;
}
- GST_DEBUG ("got size %u", GST_BUFFER_SIZE (buf));
+    GST_DEBUG ("got size %u", (guint) gst_buffer_get_size (buf));
if (size < 4) {
GST_DEBUG ("skipping INFO LIST prefix");
avi->offset += (4 - GST_ROUND_UP_2 (size));
continue;
}
- sub = gst_buffer_create_sub (buf, 4, GST_BUFFER_SIZE (buf) - 4);
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, 4, -1);
gst_riff_parse_info (element, sub, &tags);
if (tags) {
if (avi->globaltags) {
GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
goto pull_range_failed;
}
- GST_MEMDUMP ("Junk", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ data = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+ GST_MEMDUMP ("Junk", data, bsize);
+ gst_buffer_unmap (buf, data, bsize);
gst_buffer_unref (buf);
}
avi->offset += 8 + GST_ROUND_UP_2 (size);
gst_avi_demux_expose_streams (avi, FALSE);
- /* create initial NEWSEGMENT event */
- if ((stop = avi->segment.stop) == GST_CLOCK_TIME_NONE)
- stop = avi->segment.duration;
-
- GST_DEBUG_OBJECT (avi, "segment stop %" G_GINT64_FORMAT, stop);
-
/* do initial seek to the default segment values */
gst_avi_demux_do_seek (avi, &avi->segment);
- /* prepare initial segment */
+ /* create initial NEWSEGMENT event */
if (avi->seg_event)
gst_event_unref (avi->seg_event);
- avi->seg_event = gst_event_new_new_segment_full
- (FALSE, avi->segment.rate, avi->segment.applied_rate, GST_FORMAT_TIME,
- avi->segment.start, stop, avi->segment.time);
+ avi->seg_event = gst_event_new_segment (&avi->segment);
stamp = gst_util_get_timestamp () - stamp;
GST_DEBUG_OBJECT (avi, "pulling header took %" GST_TIME_FORMAT,
guint i, index;
GstAviStream *stream;
- seek_time = segment->last_stop;
+ seek_time = segment->position;
keyframe = !!(segment->flags & GST_SEEK_FLAG_KEY_UNIT);
GST_DEBUG_OBJECT (avi, "seek to: %" GST_TIME_FORMAT
GST_TIME_ARGS (seek_time));
}
- /* the seek time is also the last_stop and stream time when going
+ /* the seek time is also the position and stream time when going
* forwards */
- segment->last_stop = seek_time;
+ segment->position = seek_time;
if (segment->rate > 0.0)
segment->time = seek_time;
if (event) {
GST_DEBUG_OBJECT (avi, "configuring seek");
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
}
- /* do the seek, seeksegment.last_stop contains the new position, this
+ /* do the seek, seeksegment.position contains the new position, this
* actually never fails. */
gst_avi_demux_do_seek (avi, &seeksegment);
- gst_event_replace (&avi->close_seg_event, NULL);
if (flush) {
- GstEvent *fevent = gst_event_new_flush_stop ();
+ GstEvent *fevent = gst_event_new_flush_stop (TRUE);
GST_DEBUG_OBJECT (avi, "sending flush stop");
gst_avi_demux_push_event (avi, gst_event_ref (fevent));
gst_pad_push_event (avi->sinkpad, fevent);
- } else if (avi->segment_running) {
- /* we are running the current segment and doing a non-flushing seek,
- * close the segment first based on the last_stop. */
- GST_DEBUG_OBJECT (avi, "closing running segment %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, avi->segment.start, avi->segment.last_stop);
- avi->close_seg_event = gst_event_new_new_segment_full (TRUE,
- avi->segment.rate, avi->segment.applied_rate, avi->segment.format,
- avi->segment.start, avi->segment.last_stop, avi->segment.time);
}
/* now update the real segment info */
if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT_CAST (avi),
gst_message_new_segment_start (GST_OBJECT_CAST (avi),
- avi->segment.format, avi->segment.last_stop));
+ avi->segment.format, avi->segment.position));
}
- /* prepare for streaming again */
- if ((stop = avi->segment.stop) == GST_CLOCK_TIME_NONE)
- stop = avi->segment.duration;
-
/* queue the segment event for the streaming thread. */
if (avi->seg_event)
gst_event_unref (avi->seg_event);
- if (avi->segment.rate > 0.0) {
- /* forwards goes from last_stop to stop */
- avi->seg_event = gst_event_new_new_segment_full (FALSE,
- avi->segment.rate, avi->segment.applied_rate, avi->segment.format,
- avi->segment.last_stop, stop, avi->segment.time);
- } else {
- /* reverse goes from start to last_stop */
- avi->seg_event = gst_event_new_new_segment_full (FALSE,
- avi->segment.rate, avi->segment.applied_rate, avi->segment.format,
- avi->segment.start, avi->segment.last_stop, avi->segment.time);
- }
+ avi->seg_event = gst_event_new_segment (&avi->segment);
if (!avi->streaming) {
- avi->segment_running = TRUE;
gst_pad_start_task (avi->sinkpad, (GstTaskFunction) gst_avi_demux_loop,
avi->sinkpad);
}
/* let gst_segment handle any tricky stuff */
GST_DEBUG_OBJECT (avi, "configuring seek");
memcpy (&seeksegment, &avi->segment, sizeof (GstSegment));
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
keyframe = !!(flags & GST_SEEK_FLAG_KEY_UNIT);
- cur = seeksegment.last_stop;
+ cur = seeksegment.position;
GST_DEBUG_OBJECT (avi,
"Seek requested: ts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
GstStructure *s;
gint y, w, h;
gint bpp, stride;
- guint8 *tmp = NULL;
+ guint8 *tmp = NULL, *data;
+ gsize size;
+ GstCaps *caps;
if (stream->strh->type != GST_RIFF_FCC_vids)
return buf;
return buf; /* Ignore non DIB buffers */
}
- s = gst_caps_get_structure (GST_PAD_CAPS (stream->pad), 0);
+ caps = gst_pad_get_current_caps (stream->pad);
+ s = gst_caps_get_structure (caps, 0);
+ gst_caps_unref (caps);
+
if (!gst_structure_get_int (s, "bpp", &bpp)) {
GST_WARNING ("Failed to retrieve depth from caps");
return buf;
stride = w * (bpp / 8);
buf = gst_buffer_make_writable (buf);
- if (GST_BUFFER_SIZE (buf) < (stride * h)) {
+
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ if (size < (stride * h)) {
GST_WARNING ("Buffer is smaller than reported Width x Height x Depth");
+ gst_buffer_unmap (buf, data, size);
return buf;
}
tmp = g_malloc (stride);
for (y = 0; y < h / 2; y++) {
- swap_line (GST_BUFFER_DATA (buf) + stride * y,
- GST_BUFFER_DATA (buf) + stride * (h - 1 - y), tmp, stride);
+ swap_line (data + stride * y, data + stride * (h - 1 - y), tmp, stride);
}
g_free (tmp);
+ gst_buffer_unmap (buf, data, size);
+
return buf;
}
goto pull_failed;
/* check for short buffers, this is EOS as well */
- if (GST_BUFFER_SIZE (buf) < size)
+ if (gst_buffer_get_size (buf) < size)
goto short_buffer;
/* invert the picture if needed */
gst_avi_demux_add_assoc (avi, stream, timestamp, offset, keyframe);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (stream->pad));
-
/* update current position in the segment */
- gst_segment_set_last_stop (&avi->segment, GST_FORMAT_TIME, timestamp);
+ avi->segment.position = timestamp;
GST_DEBUG_OBJECT (avi, "Pushing buffer of size %u, ts %"
GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
", off_end %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (buf), GST_TIME_ARGS (timestamp),
+      (guint) gst_buffer_get_size (buf), GST_TIME_ARGS (timestamp),
GST_TIME_ARGS (duration), out_offset, out_offset_end);
ret = gst_pad_push (stream->pad, buf);
{
GST_WARNING_OBJECT (avi, "Short read at offset %" G_GUINT64_FORMAT
", only got %d/%" G_GUINT64_FORMAT " bytes (truncated file?)", offset,
- GST_BUFFER_SIZE (buf), size);
+      (int) gst_buffer_get_size (buf), size);
gst_buffer_unref (buf);
ret = GST_FLOW_UNEXPECTED;
goto beach;
if (size) {
buf = gst_adapter_take_buffer (avi->adapter, GST_ROUND_UP_2 (size));
/* patch the size */
- GST_BUFFER_SIZE (buf) = size;
+ gst_buffer_resize (buf, 0, size);
} else {
buf = NULL;
}
stream->current_total += size;
/* update current position in the segment */
- gst_segment_set_last_stop (&avi->segment, GST_FORMAT_TIME, next_ts);
+ avi->segment.position = next_ts;
if (saw_desired_kf && buf) {
GstClockTime dur_ts = 0;
GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
}
- gst_buffer_set_caps (buf, GST_PAD_CAPS (stream->pad));
GST_DEBUG_OBJECT (avi,
"Pushing buffer with time=%" GST_TIME_FORMAT ", duration %"
GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT
avi->state = GST_AVI_DEMUX_MOVI;
break;
case GST_AVI_DEMUX_MOVI:
- if (G_UNLIKELY (avi->close_seg_event)) {
- gst_avi_demux_push_event (avi, avi->close_seg_event);
- avi->close_seg_event = NULL;
- }
if (G_UNLIKELY (avi->seg_event)) {
gst_avi_demux_push_event (avi, avi->seg_event);
avi->seg_event = NULL;
gboolean push_eos = FALSE;
GST_LOG_OBJECT (avi, "pausing task, reason %s", gst_flow_get_name (res));
- avi->segment_running = FALSE;
gst_pad_pause_task (avi->sinkpad);
-
if (res == GST_FLOW_UNEXPECTED) {
/* handle end-of-stream/segment */
+ /* so align our position with the end of it, if there is one
+ * this ensures a subsequent will arrive at correct base/acc time */
+ if (avi->segment.rate > 0.0 &&
+ GST_CLOCK_TIME_IS_VALID (avi->segment.stop))
+ avi->segment.position = avi->segment.stop;
+ else if (avi->segment.rate < 0.0)
+ avi->segment.position = avi->segment.start;
if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gint64 stop;
avi->stream[i].discont = TRUE;
}
- GST_DEBUG ("Store %d bytes in adapter", GST_BUFFER_SIZE (buf));
+  GST_DEBUG ("Store %d bytes in adapter", (int) gst_buffer_get_size (buf));
gst_adapter_push (avi->adapter, buf);
switch (avi->state) {
}
break;
case GST_AVI_DEMUX_MOVI:
- if (G_UNLIKELY (avi->close_seg_event)) {
- gst_avi_demux_push_event (avi, avi->close_seg_event);
- avi->close_seg_event = NULL;
- }
if (G_UNLIKELY (avi->seg_event)) {
gst_avi_demux_push_event (avi, avi->seg_event);
avi->seg_event = NULL;
static gboolean
gst_avi_demux_sink_activate (GstPad * sinkpad)
{
- if (gst_pad_check_pull_range (sinkpad)) {
- GST_DEBUG ("going to pull mode");
- return gst_pad_activate_pull (sinkpad, TRUE);
- } else {
- GST_DEBUG ("going to push (streaming) mode");
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ gst_query_parse_scheduling (query, &pull_mode, NULL, NULL, NULL, NULL, NULL);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_pull (sinkpad, TRUE);
+
+activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
return gst_pad_activate_push (sinkpad, TRUE);
}
}
GstAviDemux *avi = GST_AVI_DEMUX (GST_OBJECT_PARENT (sinkpad));
if (active) {
- avi->segment_running = TRUE;
avi->streaming = FALSE;
return gst_pad_start_task (sinkpad, (GstTaskFunction) gst_avi_demux_loop,
sinkpad);
} else {
- avi->segment_running = FALSE;
return gst_pad_stop_task (sinkpad);
}
}
/* segment in TIME */
GstSegment segment;
- gboolean segment_running;
/* pending tags/events */
GstEvent *seg_event;
- GstEvent *close_seg_event;
GstTagList *globaltags;
gboolean got_tags;
* <para>(write everything in one line, without the backslash characters)</para>
* |[
* gst-launch videotestsrc num-buffers=250 \
- * ! 'video/x-raw-yuv,format=(fourcc)I420,width=320,height=240,framerate=(fraction)25/1' \
+ * ! 'video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1' \
* ! queue ! mux. \
* audiotestsrc num-buffers=440 ! audioconvert \
* ! 'audio/x-raw-int,rate=44100,channels=2' ! queue ! mux. \
* test sound.
* |[
* gst-launch videotestsrc num-buffers=250 \
- * ! 'video/x-raw-yuv,format=(fourcc)I420,width=320,height=240,framerate=(fraction)25/1' \
+ * ! 'video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1' \
* ! xvidenc ! queue ! mux. \
* audiotestsrc num-buffers=440 ! audioconvert ! 'audio/x-raw-int,rate=44100,channels=2' \
* ! lame ! queue ! mux. \
GST_STATIC_PAD_TEMPLATE ("video_%d",
GST_PAD_SINK,
GST_PAD_REQUEST,
- GST_STATIC_CAPS ("video/x-raw-yuv, "
- "format = (fourcc) { YUY2, I420 }, "
+ GST_STATIC_CAPS ("video/x-raw, "
+ "format = (string) { YUY2, I420 }, "
"width = (int) [ 16, 4096 ], "
"height = (int) [ 16, 4096 ], "
"framerate = (fraction) [ 0, MAX ]; "
"wmaversion = (int) [ 1, 2 ] ")
);
-static void gst_avi_mux_base_init (gpointer g_class);
-static void gst_avi_mux_class_init (GstAviMuxClass * klass);
-static void gst_avi_mux_init (GstAviMux * avimux);
static void gst_avi_mux_pad_reset (GstAviPad * avipad, gboolean free);
static GstFlowReturn gst_avi_mux_collect_pads (GstCollectPads * pads,
GstAviMux * avimux);
static gboolean gst_avi_mux_handle_event (GstPad * pad, GstEvent * event);
static GstPad *gst_avi_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_avi_mux_release_pad (GstElement * element, GstPad * pad);
static void gst_avi_mux_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_avi_mux_change_state (GstElement * element,
GstStateChange transition);
-static GstElementClass *parent_class = NULL;
-
-GType
-gst_avi_mux_get_type (void)
-{
- static GType avimux_type = 0;
-
- if (!avimux_type) {
- static const GTypeInfo avimux_info = {
- sizeof (GstAviMuxClass),
- gst_avi_mux_base_init,
- NULL,
- (GClassInitFunc) gst_avi_mux_class_init,
- NULL,
- NULL,
- sizeof (GstAviMux),
- 0,
- (GInstanceInitFunc) gst_avi_mux_init,
- };
- static const GInterfaceInfo tag_setter_info = {
- NULL,
- NULL,
- NULL
- };
-
- avimux_type =
- g_type_register_static (GST_TYPE_ELEMENT, "GstAviMux", &avimux_info, 0);
- g_type_add_interface_static (avimux_type, GST_TYPE_TAG_SETTER,
- &tag_setter_info);
- }
- return avimux_type;
-}
-
-static void
-gst_avi_mux_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&audio_sink_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&video_sink_factory));
-
- gst_element_class_set_details_simple (element_class, "Avi muxer",
- "Codec/Muxer",
- "Muxes audio and video into an avi stream",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-
- GST_DEBUG_CATEGORY_INIT (avimux_debug, "avimux", 0, "Muxer for AVI streams");
-}
+#define gst_avi_mux_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstAviMux, gst_avi_mux, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
static void
gst_avi_mux_finalize (GObject * object)
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
+ GST_DEBUG_CATEGORY_INIT (avimux_debug, "avimux", 0, "Muxer for AVI streams");
gobject_class->get_property = gst_avi_mux_get_property;
gobject_class->set_property = gst_avi_mux_set_property;
GST_DEBUG_FUNCPTR (gst_avi_mux_request_new_pad);
gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_avi_mux_release_pad);
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_avi_mux_change_state);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&audio_sink_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&video_sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "Avi muxer",
+ "Codec/Muxer",
+ "Muxes audio and video into an avi stream",
+ "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
}
/* reset pad to initial state
avipad->vprp.field_info[0].valid_bm_width = width;
}
- if (!strcmp (mimetype, "video/x-raw-yuv")) {
- guint32 format;
+ if (!strcmp (mimetype, "video/x-raw")) {
+ const gchar *format;
+ GstVideoFormat fmt;
+
+ format = gst_structure_get_string (structure, "format");
+ fmt = gst_video_format_from_string (format);
- gst_structure_get_fourcc (structure, "format", &format);
- avipad->vids.compression = format;
- switch (format) {
- case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
+ switch (fmt) {
+ case GST_VIDEO_FORMAT_YUY2:
+ avipad->vids.compression = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
avipad->vids.bit_cnt = 16;
break;
- case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+ case GST_VIDEO_FORMAT_I420:
+ avipad->vids.compression = GST_MAKE_FOURCC ('I', '4', '2', '0');
avipad->vids.bit_cnt = 12;
break;
+ default:
+ break;
}
} else {
avipad->vids.bit_cnt = 24;
avipad->vids_codec_data = gst_value_get_buffer (codec_data);
gst_buffer_ref (avipad->vids_codec_data);
/* keep global track of size */
- avimux->codec_data_size += GST_BUFFER_SIZE (avipad->vids_codec_data);
+ avimux->codec_data_size += gst_buffer_get_size (avipad->vids_codec_data);
} else {
avipad->prepend_buffer =
gst_buffer_ref (gst_value_get_buffer (codec_data));
GstBuffer * buffer)
{
guint8 *data;
- guint size;
+ gsize size;
guint spf;
guint32 header;
gulong layer;
gulong version;
gint lsf, mpg25;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
-
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size < 4)
goto not_parsed;
GST_WARNING_OBJECT (avimux, "input mpeg audio has varying frame size");
goto cbr_fallback;
}
+done:
+ gst_buffer_unmap (buffer, data, size);
return GST_FLOW_OK;
avipad->hdr.scale = 1;
/* no need to check further */
avipad->hook = NULL;
- return GST_FLOW_OK;
+ goto done;
}
}
avipad->auds_codec_data = gst_value_get_buffer (codec_data);
gst_buffer_ref (avipad->auds_codec_data);
/* keep global track of size */
- avimux->codec_data_size += GST_BUFFER_SIZE (avipad->auds_codec_data);
+ avimux->codec_data_size += gst_buffer_get_size (avipad->auds_codec_data);
}
if (!strcmp (mimetype, "audio/x-raw-int")) {
GstBuffer *codec_data_buf = avipad->auds_codec_data;
const gchar *stream_format;
guint codec;
+ guint8 data[2];
stream_format = gst_structure_get_string (structure, "stream-format");
if (stream_format) {
}
/* vbr case needs some special handling */
- if (!codec_data_buf || GST_BUFFER_SIZE (codec_data_buf) < 2) {
+ if (!codec_data_buf || gst_buffer_get_size (codec_data_buf) < 2) {
GST_WARNING_OBJECT (avimux, "no (valid) codec_data for AAC audio");
break;
}
avipad->auds.format = GST_RIFF_WAVE_FORMAT_AAC;
/* need to determine frame length */
- codec = GST_READ_UINT16_BE (GST_BUFFER_DATA (codec_data_buf));
+ gst_buffer_extract (codec_data_buf, 0, data, 2);
+ codec = GST_READ_UINT16_BE (data);
avipad->parent.hdr.scale = (codec & 0x4) ? 960 : 1024;
break;
}
static GstPad *
gst_avi_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * req_name)
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
GstAviMux *avimux;
GstPad *newpad;
GstElementClass *klass;
gchar *name = NULL;
const gchar *pad_name = NULL;
- GstPadSetCapsFunction setcapsfunc = NULL;
gint pad_id;
g_return_val_if_fail (templ != NULL, NULL);
name = g_strdup_printf ("audio_%02d", avimux->audio_pads++);
pad_name = name;
}
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_avi_mux_audsink_set_caps);
/* init pad specific data */
avipad = g_malloc0 (sizeof (GstAviAudioPad));
/* setup pad */
pad_name = "video_00";
avimux->video_pads++;
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_avi_mux_vidsink_set_caps);
/* init pad specific data */
avipad = g_malloc0 (sizeof (GstAviVideoPad));
goto wrong_template;
newpad = gst_pad_new_from_template (templ, pad_name);
- gst_pad_set_setcaps_function (newpad, setcapsfunc);
g_free (name);
GstByteWriter bw;
GSList *node;
guint avih, riff, hdrl;
+ guint8 *bdata;
+ gsize bsize;
GST_DEBUG_OBJECT (avimux, "creating avi header, data_size %u, idx_size %u",
avimux->data_size, avimux->idx_size);
if (avipad->is_video) {
codec_size = vidpad->vids_codec_data ?
- GST_BUFFER_SIZE (vidpad->vids_codec_data) : 0;
+ gst_buffer_get_size (vidpad->vids_codec_data) : 0;
/* the video header */
strf = gst_avi_mux_start_chunk (&bw, "strf", 0);
/* the actual header */
gst_byte_writer_put_uint32_le (&bw, vidpad->vids.num_colors);
gst_byte_writer_put_uint32_le (&bw, vidpad->vids.imp_colors);
if (vidpad->vids_codec_data) {
- gst_byte_writer_put_data (&bw,
- GST_BUFFER_DATA (vidpad->vids_codec_data),
- GST_BUFFER_SIZE (vidpad->vids_codec_data));
+ bdata =
+ gst_buffer_map (vidpad->vids_codec_data, &bsize, NULL,
+ GST_MAP_READ);
+ gst_byte_writer_put_data (&bw, bdata, bsize);
+ gst_buffer_unmap (vidpad->vids_codec_data, bdata, bsize);
}
gst_avi_mux_end_chunk (&bw, strf);
}
} else {
codec_size = audpad->auds_codec_data ?
- GST_BUFFER_SIZE (audpad->auds_codec_data) : 0;
+ gst_buffer_get_size (audpad->auds_codec_data) : 0;
/* the audio header */
strf = gst_avi_mux_start_chunk (&bw, "strf", 0);
/* the actual header */
gst_byte_writer_put_uint16_le (&bw, audpad->auds.size);
gst_byte_writer_put_uint16_le (&bw, codec_size);
if (audpad->auds_codec_data) {
- gst_byte_writer_put_data (&bw,
- GST_BUFFER_DATA (audpad->auds_codec_data),
- GST_BUFFER_SIZE (audpad->auds_codec_data));
+ bdata =
+ gst_buffer_map (audpad->auds_codec_data, &bsize, NULL,
+ GST_MAP_READ);
+ gst_byte_writer_put_data (&bw, bdata, bsize);
+        gst_buffer_unmap (audpad->auds_codec_data, bdata, bsize);
}
gst_avi_mux_end_chunk (&bw, strf);
}
buffer = gst_byte_writer_reset_and_get_buffer (&bw);
/* ... but RIFF includes more than just header */
- size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buffer) + 4);
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READWRITE);
+ size = GST_READ_UINT32_LE (bdata + 4);
size += 8 + avimux->data_size + avimux->idx_size;
- GST_WRITE_UINT32_LE (GST_BUFFER_DATA (buffer) + 4, size);
+ GST_WRITE_UINT32_LE (bdata + 4, size);
- GST_MEMDUMP_OBJECT (avimux, "avi header", GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ GST_MEMDUMP_OBJECT (avimux, "avi header", bdata, bsize);
+ gst_buffer_unmap (buffer, bdata, bsize);
return buffer;
}
gst_avi_mux_riff_get_avix_header (guint32 datax_size)
{
GstBuffer *buffer;
- guint8 *buffdata;
+ guint8 *bdata;
+ gsize bsize;
buffer = gst_buffer_new_and_alloc (24);
- buffdata = GST_BUFFER_DATA (buffer);
- memcpy (buffdata + 0, "RIFF", 4);
- GST_WRITE_UINT32_LE (buffdata + 4, datax_size + 3 * 4);
- memcpy (buffdata + 8, "AVIX", 4);
- memcpy (buffdata + 12, "LIST", 4);
- GST_WRITE_UINT32_LE (buffdata + 16, datax_size);
- memcpy (buffdata + 20, "movi", 4);
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_WRITE);
+ memcpy (bdata + 0, "RIFF", 4);
+ GST_WRITE_UINT32_LE (bdata + 4, datax_size + 3 * 4);
+ memcpy (bdata + 8, "AVIX", 4);
+ memcpy (bdata + 12, "LIST", 4);
+ GST_WRITE_UINT32_LE (bdata + 16, datax_size);
+ memcpy (bdata + 20, "movi", 4);
+ gst_buffer_unmap (buffer, bdata, bsize);
return buffer;
}
gst_avi_mux_riff_get_header (GstAviPad * avipad, guint32 video_frame_size)
{
GstBuffer *buffer;
- guint8 *buffdata;
+ guint8 *bdata;
+ gsize bsize;
buffer = gst_buffer_new_and_alloc (8);
- buffdata = GST_BUFFER_DATA (buffer);
- memcpy (buffdata + 0, avipad->tag, 4);
- GST_WRITE_UINT32_LE (buffdata + 4, video_frame_size);
+
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_WRITE);
+ memcpy (bdata + 0, avipad->tag, 4);
+ GST_WRITE_UINT32_LE (bdata + 4, video_frame_size);
+ gst_buffer_unmap (buffer, bdata, bsize);
return buffer;
}
{
GstFlowReturn res;
GstBuffer *buffer;
- guint8 *buffdata, *data;
+ guint8 *data;
gst_riff_index_entry *entry;
gint i;
guint32 size, entry_count;
gboolean is_pcm = FALSE;
guint32 pcm_samples = 0;
+ guint8 *bdata;
+ gsize bsize;
/* check if it is pcm */
if (avipad && !avipad->is_video) {
/* allocate the maximum possible */
buffer = gst_buffer_new_and_alloc (32 + 8 * avimux->idx_index);
- buffdata = GST_BUFFER_DATA (buffer);
+
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_WRITE);
+ data = bdata;
/* general index chunk info */
- memcpy (buffdata + 0, chunk, 4); /* chunk id */
- GST_WRITE_UINT32_LE (buffdata + 4, 0); /* chunk size; fill later */
- GST_WRITE_UINT16_LE (buffdata + 8, 2); /* index entry is 2 words */
- buffdata[10] = 0; /* index subtype */
- buffdata[11] = GST_AVI_INDEX_OF_CHUNKS; /* index type: AVI_INDEX_OF_CHUNKS */
- GST_WRITE_UINT32_LE (buffdata + 12, 0); /* entries in use; fill later */
- memcpy (buffdata + 16, code, 4); /* stream to which index refers */
- GST_WRITE_UINT64_LE (buffdata + 20, avimux->avix_start); /* base offset */
- GST_WRITE_UINT32_LE (buffdata + 28, 0); /* reserved */
- buffdata += 32;
+ memcpy (bdata + 0, chunk, 4); /* chunk id */
+ GST_WRITE_UINT32_LE (bdata + 4, 0); /* chunk size; fill later */
+ GST_WRITE_UINT16_LE (bdata + 8, 2); /* index entry is 2 words */
+ bdata[10] = 0; /* index subtype */
+ bdata[11] = GST_AVI_INDEX_OF_CHUNKS; /* index type: AVI_INDEX_OF_CHUNKS */
+ GST_WRITE_UINT32_LE (bdata + 12, 0); /* entries in use; fill later */
+ memcpy (bdata + 16, code, 4); /* stream to which index refers */
+ GST_WRITE_UINT64_LE (bdata + 20, avimux->avix_start); /* base offset */
+ GST_WRITE_UINT32_LE (bdata + 28, 0); /* reserved */
+ bdata += 32;
/* now the actual index entries */
i = avimux->idx_index;
while (i > 0) {
if (memcmp (&entry->id, code, 4) == 0) {
/* enter relative offset to the data (!) */
- GST_WRITE_UINT32_LE (buffdata, GUINT32_FROM_LE (entry->offset) + 8);
+ GST_WRITE_UINT32_LE (bdata, GUINT32_FROM_LE (entry->offset) + 8);
/* msb is set if not (!) keyframe */
- GST_WRITE_UINT32_LE (buffdata + 4, GUINT32_FROM_LE (entry->size)
+ GST_WRITE_UINT32_LE (bdata + 4, GUINT32_FROM_LE (entry->size)
| (GUINT32_FROM_LE (entry->flags)
& GST_RIFF_IF_KEYFRAME ? 0 : 1U << 31));
- buffdata += 8;
+ bdata += 8;
}
i--;
entry++;
}
/* ok, now we know the size and no of entries, fill in where needed */
- data = GST_BUFFER_DATA (buffer);
- GST_BUFFER_SIZE (buffer) = size = buffdata - data;
+ size = bdata - data;
GST_WRITE_UINT32_LE (data + 4, size - 8);
entry_count = (size - 32) / 8;
GST_WRITE_UINT32_LE (data + 12, entry_count);
+ gst_buffer_unmap (buffer, data, size);
- /* decorate and send */
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
+ /* send */
if ((res = gst_pad_push (avimux->srcpad, buffer)) != GST_FLOW_OK)
return res;
GstFlowReturn res;
GstBuffer *buffer;
guint8 *buffdata;
+ gsize buffsize;
buffer = gst_buffer_new_and_alloc (8);
- buffdata = GST_BUFFER_DATA (buffer);
+
+ buffdata = gst_buffer_map (buffer, &buffsize, NULL, GST_MAP_WRITE);
memcpy (buffdata + 0, "idx1", 4);
GST_WRITE_UINT32_LE (buffdata + 4,
avimux->idx_index * sizeof (gst_riff_index_entry));
+ gst_buffer_unmap (buffer, buffdata, buffsize);
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
res = gst_pad_push (avimux->srcpad, buffer);
if (res != GST_FLOW_OK)
return res;
buffer = gst_buffer_new ();
- GST_BUFFER_SIZE (buffer) = avimux->idx_index * sizeof (gst_riff_index_entry);
- GST_BUFFER_DATA (buffer) = (guint8 *) avimux->idx;
- GST_BUFFER_MALLOCDATA (buffer) = GST_BUFFER_DATA (buffer);
+
+ buffsize = avimux->idx_index * sizeof (gst_riff_index_entry);
+ buffdata = (guint8 *) avimux->idx;
avimux->idx = NULL; /* will be free()'ed by gst_buffer_unref() */
- avimux->total_data += GST_BUFFER_SIZE (buffer) + 8;
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
+ gst_buffer_take_memory (buffer, -1,
+ gst_memory_new_wrapped (0, buffdata, g_free, buffsize, 0, buffsize));
+
+ avimux->total_data += buffsize + 8;
+
res = gst_pad_push (avimux->srcpad, buffer);
if (res != GST_FLOW_OK)
return res;
{
GstFlowReturn res = GST_FLOW_OK;
GstBuffer *header;
- GstEvent *event;
GSList *node;
/* first some odml standard index chunks in the movi list */
}
if (avimux->is_bigfile) {
+ GstSegment segment;
+
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+
/* search back */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- avimux->avix_start, GST_CLOCK_TIME_NONE, avimux->avix_start);
- /* if the event succeeds */
- gst_pad_push_event (avimux->srcpad, event);
+ segment.start = avimux->avix_start;
+ segment.time = avimux->avix_start;
+ gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));
/* rewrite AVIX header */
header = gst_avi_mux_riff_get_avix_header (avimux->datax_size);
- gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
res = gst_pad_push (avimux->srcpad, header);
/* go back to current location, at least try */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- avimux->total_data, GST_CLOCK_TIME_NONE, avimux->total_data);
- gst_pad_push_event (avimux->srcpad, event);
+ segment.start = avimux->total_data;
+ segment.time = avimux->total_data;
+ gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));
if (res != GST_FLOW_OK)
return res;
}
header = gst_avi_mux_riff_get_avix_header (0);
- avimux->total_data += GST_BUFFER_SIZE (header);
+ avimux->total_data += gst_buffer_get_size (header);
/* avix_start is used as base offset for the odml index chunk */
avimux->idx_offset = avimux->total_data - avimux->avix_start;
- gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
+
return gst_pad_push (avimux->srcpad, header);
}
GstBuffer *header;
GSList *node;
GstCaps *caps;
+ GstSegment segment;
avimux->total_data = 0;
avimux->total_frames = 0;
gst_caps_unref (caps);
/* let downstream know we think in BYTES and expect to do seeking later on */
- gst_pad_push_event (avimux->srcpad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));
/* header */
avimux->avi_hdr.streams = g_slist_length (avimux->sinkpads);
avimux->is_bigfile = FALSE;
header = gst_avi_mux_riff_get_avi_header (avimux);
- avimux->total_data += GST_BUFFER_SIZE (header);
+ avimux->total_data += gst_buffer_get_size (header);
- gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
res = gst_pad_push (avimux->srcpad, header);
avimux->idx_offset = avimux->total_data;
gst_avi_mux_stop_file (GstAviMux * avimux)
{
GstFlowReturn res = GST_FLOW_OK;
- GstEvent *event;
GstBuffer *header;
GSList *node;
+ GstSegment segment;
/* if bigfile, rewrite header, else write indexes */
/* don't bail out at once if error, still try to re-write header */
avimux->avi_hdr.tot_frames = avimux->num_frames;
/* seek and rewrite the header */
- header = gst_avi_mux_riff_get_avi_header (avimux);
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- 0, GST_CLOCK_TIME_NONE, 0);
- gst_pad_push_event (avimux->srcpad, event);
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));
- gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
/* the first error survives */
+ header = gst_avi_mux_riff_get_avi_header (avimux);
if (res == GST_FLOW_OK)
res = gst_pad_push (avimux->srcpad, header);
else
gst_pad_push (avimux->srcpad, header);
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- avimux->total_data, GST_CLOCK_TIME_NONE, avimux->total_data);
- gst_pad_push_event (avimux->srcpad, event);
+ segment.start = avimux->total_data;
+ segment.time = avimux->total_data;
+ gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));
avimux->write_header = TRUE;
gst_avi_mux_handle_event (GstPad * pad, GstEvent * event)
{
GstAviMux *avimux;
- gboolean ret;
+ gboolean ret = TRUE;
avimux = GST_AVI_MUX (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+ GstAviCollectData *collect_pad;
+ GstAviVideoPad *avipad;
+
+ gst_event_parse_caps (event, &caps);
+
+ /* find stream data */
+ collect_pad = (GstAviCollectData *) gst_pad_get_element_private (pad);
+ g_assert (collect_pad);
+ avipad = (GstAviVideoPad *) collect_pad->avipad;
+ g_assert (avipad);
+
+ if (avipad->parent.is_video) {
+ ret = gst_avi_mux_vidsink_set_caps (pad, caps);
+ } else {
+ ret = gst_avi_mux_audsink_set_caps (pad, caps);
+ }
+ break;
+ }
case GST_EVENT_TAG:{
GstTagList *list;
GstTagSetter *setter = GST_TAG_SETTER (avimux);
}
/* now GstCollectPads can take care of the rest, e.g. EOS */
- ret = avimux->collect_event (pad, event);
+ if (ret)
+ ret = avimux->collect_event (pad, event);
gst_object_unref (avimux);
gst_avi_mux_send_pad_data (GstAviMux * avimux, gulong num_bytes)
{
GstBuffer *buffer;
+ guint8 *bdata;
+ gsize bsize;
buffer = gst_buffer_new_and_alloc (num_bytes);
- memset (GST_BUFFER_DATA (buffer), 0, num_bytes);
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
+
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_WRITE);
+ memset (bdata, 0, num_bytes);
+ gst_buffer_unmap (buffer, bdata, bsize);
+
return gst_pad_push (avimux->srcpad, buffer);
}
GstBuffer *data, *header;
gulong total_size, pad_bytes = 0;
guint flags;
+ gsize datasize;
data = gst_collect_pads_pop (avimux->collect, avipad->collect);
/* arrange downstream running time */
- data = gst_buffer_make_metadata_writable (data);
+ data = gst_buffer_make_writable (data);
GST_BUFFER_TIMESTAMP (data) =
gst_segment_to_running_time (&avipad->collect->segment,
GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (data));
if (vidpad->prepend_buffer) {
GstBuffer *newdata = gst_buffer_merge (vidpad->prepend_buffer, data);
- gst_buffer_copy_metadata (newdata, data, GST_BUFFER_COPY_TIMESTAMPS);
+ gst_buffer_copy_into (newdata, data, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
gst_buffer_unref (data);
gst_buffer_unref (vidpad->prepend_buffer);
return res;
}
+ datasize = gst_buffer_get_size (data);
+
/* need to restart or start a next avix chunk ? */
if ((avimux->is_bigfile ? avimux->datax_size : avimux->data_size) +
- GST_BUFFER_SIZE (data) > GST_AVI_MAX_SIZE) {
+ datasize > GST_AVI_MAX_SIZE) {
if (avimux->enable_large_avi) {
if ((res = gst_avi_mux_bigfile (avimux, FALSE)) != GST_FLOW_OK)
return res;
}
/* get header and record some stats */
- if (GST_BUFFER_SIZE (data) & 1) {
- pad_bytes = 2 - (GST_BUFFER_SIZE (data) & 1);
+ if (datasize & 1) {
+ pad_bytes = 2 - (datasize & 1);
}
- header = gst_avi_mux_riff_get_header (avipad, GST_BUFFER_SIZE (data));
- total_size = GST_BUFFER_SIZE (header) + GST_BUFFER_SIZE (data) + pad_bytes;
+ header = gst_avi_mux_riff_get_header (avipad, datasize);
+ total_size = gst_buffer_get_size (header) + datasize + pad_bytes;
if (avimux->is_bigfile) {
avimux->datax_size += total_size;
avipad->hook (avimux, avipad, data);
/* the suggested buffer size is the max frame size */
- if (avipad->hdr.bufsize < GST_BUFFER_SIZE (data))
- avipad->hdr.bufsize = GST_BUFFER_SIZE (data);
+ if (avipad->hdr.bufsize < datasize)
+ avipad->hdr.bufsize = datasize;
if (avipad->is_video) {
avimux->total_frames++;
GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;
flags = 0;
- audpad->audio_size += GST_BUFFER_SIZE (data);
+ audpad->audio_size += datasize;
audpad->audio_time += GST_BUFFER_DURATION (data);
}
- gst_avi_mux_add_index (avimux, avipad, flags, GST_BUFFER_SIZE (data));
-
- /* prepare buffers for sending */
- gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
- data = gst_buffer_make_metadata_writable (data);
- gst_buffer_set_caps (data, GST_PAD_CAPS (avimux->srcpad));
+ gst_avi_mux_add_index (avimux, avipad, flags, datasize);
+ /* send buffers */
GST_LOG_OBJECT (avimux, "pushing buffers: head, data");
if ((res = gst_pad_push (avimux->srcpad, header)) != GST_FLOW_OK)
static gboolean gst_avi_subtitle_send_event (GstElement * element,
GstEvent * event);
-GST_BOILERPLATE (GstAviSubtitle, gst_avi_subtitle, GstElement,
- GST_TYPE_ELEMENT);
+#define gst_avi_subtitle_parent_class parent_class
+G_DEFINE_TYPE (GstAviSubtitle, gst_avi_subtitle, GST_TYPE_ELEMENT);
#define IS_BOM_UTF8(data) ((GST_READ_UINT32_BE(data) >> 8) == 0xEFBBBF)
#define IS_BOM_UTF16_BE(data) (GST_READ_UINT16_BE(data) == 0xFEFF)
{
const gchar *input_enc = NULL;
GstBuffer *ret = NULL;
- gchar *data;
+ gchar *data, *bdata;
+ gsize bsize;
- data = (gchar *) GST_BUFFER_DATA (buffer) + offset;
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READ);
+ data = bdata + offset;
if (len >= (3 + 1) && IS_BOM_UTF8 (data) &&
g_utf8_validate (data + 3, len - 3, NULL)) {
- ret = gst_buffer_create_sub (buffer, offset + 3, len - 3);
+ ret =
+ gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset + 3,
+ len - 3);
} else if (len >= 2 && IS_BOM_UTF16_BE (data)) {
input_enc = "UTF-16BE";
data += 2;
len -= 4;
} else if (g_utf8_validate (data, len, NULL)) {
/* not specified, check if it's UTF-8 */
- ret = gst_buffer_create_sub (buffer, offset, len);
+ ret = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset, len);
} else {
/* we could fall back to gst_tag_freeform_to_utf8() here */
GST_WARNING_OBJECT (sub, "unspecified encoding, and not UTF-8");
- return NULL;
+ ret = NULL;
+ goto done;
}
g_return_val_if_fail (ret != NULL || input_enc != NULL, NULL);
if (input_enc) {
GError *err = NULL;
gchar *utf8;
+ gsize slen;
GST_DEBUG_OBJECT (sub, "converting subtitles from %s to UTF-8", input_enc);
utf8 = g_convert (data, len, "UTF-8", input_enc, NULL, NULL, &err);
if (err != NULL) {
GST_WARNING_OBJECT (sub, "conversion to UTF-8 failed : %s", err->message);
g_error_free (err);
- return NULL;
+ ret = NULL;
+ goto done;
}
ret = gst_buffer_new ();
- GST_BUFFER_DATA (ret) = (guint8 *) utf8;
- GST_BUFFER_MALLOCDATA (ret) = (guint8 *) utf8;
- GST_BUFFER_SIZE (ret) = strlen (utf8);
+ slen = strlen (utf8);
+ gst_buffer_take_memory (ret, -1,
+ gst_memory_new_wrapped (0, utf8, g_free, slen, 0, slen));
+
GST_BUFFER_OFFSET (ret) = 0;
}
- GST_BUFFER_CAPS (ret) = gst_caps_new_simple ("application/x-subtitle", NULL);
+done:
+ gst_buffer_unmap (buffer, bdata, bsize);
+
return ret;
}
static GstFlowReturn
gst_avi_subtitle_parse_gab2_chunk (GstAviSubtitle * sub, GstBuffer * buf)
{
- const guint8 *data;
+ guint8 *data;
gchar *name_utf8;
guint name_length;
guint file_length;
- guint size;
+ gsize size;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/* check the magic word "GAB2\0", and the next word must be 2 */
if (size < 12 || memcmp (data, "GAB2\0\2\0", 5 + 2) != 0)
if (sub->subfile == NULL)
goto extract_failed;
+ gst_buffer_unmap (buf, data, size);
+
return GST_FLOW_OK;
/* ERRORS */
wrong_magic_word:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL), ("Wrong magic word"));
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_ERROR;
}
wrong_name_length:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
("name doesn't fit in buffer (%d < %d)", size, 17 + name_length));
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_ERROR;
}
wrong_fixed_word_2:
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
("wrong fixed word: expected %u, got %u", 4,
GST_READ_UINT16_LE (data + 11 + name_length)));
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_ERROR;
}
wrong_total_length:
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
("buffer size is wrong: need %d bytes, have %d bytes",
17 + name_length + file_length, size));
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_ERROR;
}
extract_failed:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
("could not extract subtitles"));
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_ERROR;
}
}
}
static void
-gst_avi_subtitle_base_init (gpointer klass)
+gst_avi_subtitle_class_init (GstAviSubtitleClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GST_DEBUG_CATEGORY_INIT (avisubtitle_debug, "avisubtitle", 0,
"parse avi subtitle stream");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_avi_subtitle_change_state);
+ gstelement_class->send_event =
+ GST_DEBUG_FUNCPTR (gst_avi_subtitle_send_event);
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&src_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"Avi subtitle parser", "Codec/Parser/Subtitle",
"Parse avi subtitle stream", "Thijs Vermeir <thijsvermeir@gmail.com>");
}
static void
-gst_avi_subtitle_class_init (GstAviSubtitleClass * klass)
-{
- GstElementClass *gstelement_class = (GstElementClass *) klass;
-
- gstelement_class->change_state =
- GST_DEBUG_FUNCPTR (gst_avi_subtitle_change_state);
- gstelement_class->send_event =
- GST_DEBUG_FUNCPTR (gst_avi_subtitle_send_event);
-}
-
-static void
-gst_avi_subtitle_init (GstAviSubtitle * self, GstAviSubtitleClass * klass)
+gst_avi_subtitle_init (GstAviSubtitle * self)
{
GstCaps *caps;
PROP_LEAKY
};
-GST_BOILERPLATE (GstCutter, gst_cutter, GstElement, GST_TYPE_ELEMENT);
+#define gst_cutter_parent_class parent_class
+G_DEFINE_TYPE (GstCutter, gst_cutter, GST_TYPE_ELEMENT);
static void gst_cutter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static gboolean gst_cutter_get_caps (GstPad * pad, GstCutter * filter);
static void
-gst_cutter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&cutter_src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&cutter_sink_factory));
- gst_element_class_set_details_simple (element_class, "Audio cutter",
- "Filter/Editor/Audio",
- "Audio Cutter to split audio into non-silent bits",
- "Thomas Vander Stichele <thomas at apestaart dot org>");
-}
-
-static void
gst_cutter_class_init (GstCutterClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
gobject_class->set_property = gst_cutter_set_property;
gobject_class->get_property = gst_cutter_get_property;
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
GST_DEBUG_CATEGORY_INIT (cutter_debug, "cutter", 0, "Audio cutting");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&cutter_src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&cutter_sink_factory));
+ gst_element_class_set_details_simple (element_class, "Audio cutter",
+ "Filter/Editor/Audio",
+ "Audio Cutter to split audio into non-silent bits",
+ "Thomas Vander Stichele <thomas at apestaart dot org>");
}
static void
-gst_cutter_init (GstCutter * filter, GstCutterClass * g_class)
+gst_cutter_init (GstCutter * filter)
{
filter->sinkpad =
gst_pad_new_from_static_template (&cutter_sink_factory, "sink");
{
GstCutter *filter;
gint16 *in_data;
+ gsize in_size;
guint num_samples;
gdouble NCS = 0.0; /* Normalized Cumulative Square of buffer */
gdouble RMS = 0.0; /* RMS of signal in buffer */
return GST_FLOW_NOT_NEGOTIATED;
}
- in_data = (gint16 *) GST_BUFFER_DATA (buf);
+ in_data = (gint16 *) gst_buffer_map (buf, &in_size, NULL, GST_MAP_READ);
GST_LOG_OBJECT (filter, "length of prerec buffer: %" GST_TIME_FORMAT,
GST_TIME_ARGS (filter->pre_run_length));
/* calculate mean square value on buffer */
switch (filter->width) {
case 16:
- num_samples = GST_BUFFER_SIZE (buf) / 2;
+ num_samples = in_size / 2;
gst_cutter_calculate_gint16 (in_data, num_samples, &NCS);
NMS = NCS / num_samples;
break;
case 8:
- num_samples = GST_BUFFER_SIZE (buf);
+ num_samples = in_size;
gst_cutter_calculate_gint8 ((gint8 *) in_data, num_samples, &NCS);
NMS = NCS / num_samples;
break;
break;
}
+ gst_buffer_unmap (buf, in_data, in_size);
+
filter->silent_prev = filter->silent;
RMS = sqrt (NMS);
GstCaps *caps;
GstStructure *structure;
- caps = gst_pad_get_caps (pad);
+ caps = gst_pad_get_current_caps (pad);
if (!caps) {
GST_INFO ("no caps on pad %s:%s", GST_DEBUG_PAD_NAME (pad));
return FALSE;
#define GST_IS_BREAK_MY_DATA_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BREAK_MY_DATA))
+GType gst_break_my_data_get_type (void);
+
enum
{
ARG_0,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
+#define gst_break_my_data_parent_class parent_class
+G_DEFINE_TYPE (GstBreakMyData, gst_break_my_data, GST_TYPE_BASE_TRANSFORM);
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_break_my_data_debug, "breakmydata", 0, \
- "debugging category for breakmydata element");
-
-GType gst_break_my_data_get_type (void);
-GST_BOILERPLATE_FULL (GstBreakMyData, gst_break_my_data, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);
-
-
-static void
-gst_break_my_data_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&bmd_sink_template));
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&bmd_src_template));
-
- gst_element_class_set_details_simple (gstelement_class, "Break my data",
- "Testing",
- "randomly change data in the stream", "Benjamin Otte <otte@gnome>");
-}
static void
gst_break_my_data_class_init (GstBreakMyDataClass * klass)
{
GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *gstelement_class;
GObjectClass *gobject_class;
gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (gst_break_my_data_debug, "breakmydata", 0,
+ "debugging category for breakmydata element");
+
gobject_class->set_property = gst_break_my_data_set_property;
gobject_class->get_property = gst_break_my_data_get_property;
"probability for each byte in the buffer to be changed", 0.0, 1.0,
0.0, G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&bmd_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&bmd_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "Break my data",
+ "Testing",
+ "randomly change data in the stream", "Benjamin Otte <otte@gnome>");
+
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_break_my_data_transform_ip);
gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_break_my_data_start);
}
static void
-gst_break_my_data_init (GstBreakMyData * bmd, GstBreakMyDataClass * g_class)
+gst_break_my_data_init (GstBreakMyData * bmd)
{
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (bmd), TRUE);
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (bmd), TRUE);
static gboolean gst_cpu_report_start (GstBaseTransform * trans);
static gboolean gst_cpu_report_stop (GstBaseTransform * trans);
-GST_BOILERPLATE (GstCpuReport, gst_cpu_report, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
-
-static void
-gst_cpu_report_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&cpu_report_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&cpu_report_src_template));
-
- gst_element_class_set_details_simple (element_class, "CPU report",
- "Testing",
- "Post cpu usage information every buffer",
- "Zaheer Abbas Merali <zaheerabbas at merali dot org>");
-}
+#define gst_cpu_report_parent_class parent_class
+G_DEFINE_TYPE (GstCpuReport, gst_cpu_report, GST_TYPE_BASE_TRANSFORM);
static void
gst_cpu_report_finalize (GObject * obj)
gst_cpu_report_class_init (GstCpuReportClass * g_class)
{
GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *element_class;
GObjectClass *gobject_class;
gobject_class = G_OBJECT_CLASS (g_class);
+ element_class = GST_ELEMENT_CLASS (g_class);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (g_class);
gobject_class->finalize = gst_cpu_report_finalize;
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&cpu_report_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&cpu_report_src_template));
+
+ gst_element_class_set_details_simple (element_class, "CPU report",
+ "Testing",
+ "Post cpu usage information every buffer",
+ "Zaheer Abbas Merali <zaheerabbas at merali dot org>");
+
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_cpu_report_transform_ip);
gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_cpu_report_start);
}
static void
-gst_cpu_report_init (GstCpuReport * report, GstCpuReportClass * g_class)
+gst_cpu_report_init (GstCpuReport * report)
{
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (report), TRUE);
static GstElementClass *parent_class = NULL;
-typedef struct _GstFencedBuffer GstFencedBuffer;
-struct _GstFencedBuffer
+typedef struct _GstMetaFenced
{
- GstBuffer buffer;
+ GstMeta meta;
+
void *region;
unsigned int length;
-};
+} GstMetaFenced;
+
+static const GstMetaInfo *
+gst_meta_fenced_get_info (void)
+{
+ static const GstMetaInfo *meta_fenced_info = NULL;
+
+ if (meta_fenced_info == NULL) {
+ meta_fenced_info = gst_meta_register ("GstMetaFenced", "GstMetaFenced",
+ sizeof (GstMetaFenced),
+ (GstMetaInitFunction) NULL,
+ (GstMetaFreeFunction) NULL,
+ (GstMetaTransformFunction) NULL,
+ (GstMetaSerializeFunction) NULL, (GstMetaDeserializeFunction) NULL);
+ }
+ return meta_fenced_info;
+}
-GType gst_fenced_buffer_get_type (void);
-static void gst_fenced_buffer_finalize (GstFencedBuffer * buf);
-static GstFencedBuffer *gst_fenced_buffer_copy (const GstBuffer * buffer);
+#define GST_META_FENCED_GET(buf) ((GstMetaFenced *)gst_buffer_get_meta(buf,gst_meta_fenced_get_info()))
+#define GST_META_FENCED_ADD(buf) ((GstMetaFenced *)gst_buffer_add_meta(buf,gst_meta_fenced_get_info(),NULL))
+
+static void gst_fenced_buffer_dispose (GstBuffer * buf);
+static GstBuffer *gst_fenced_buffer_copy (const GstBuffer * buffer);
static void *gst_fenced_buffer_alloc (GstBuffer * buffer, unsigned int length,
gboolean fence_top);
+#if 0
static GstFlowReturn gst_efence_buffer_alloc (GstPad * pad, guint64 offset,
guint size, GstCaps * caps, GstBuffer ** buf);
-
-#define GST_TYPE_FENCED_BUFFER (gst_fenced_buffer_get_type())
-
-#define GST_IS_FENCED_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_FENCED_BUFFER))
-#define GST_FENCED_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_FENCED_BUFFER, GstFencedBuffer))
+#endif
GType
gst_gst_efence_get_type (void)
GST_DEBUG_FUNCPTR (gst_pad_proxy_setcaps));
gst_pad_set_chain_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_efence_chain));
+#if 0
gst_pad_set_bufferalloc_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_efence_buffer_alloc));
+#endif
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
filter->srcpad =
efence = GST_EFENCE (GST_OBJECT_PARENT (pad));
g_return_val_if_fail (GST_IS_EFENCE (efence), GST_FLOW_ERROR);
+#if 0
if (GST_IS_FENCED_BUFFER (buffer)) {
GST_DEBUG_OBJECT (efence, "Passing on existing fenced buffer with caps %"
GST_PTR_FORMAT, GST_BUFFER_CAPS (buffer));
return gst_pad_push (efence->srcpad, buffer);
}
+#endif
copy = (GstBuffer *) gst_fenced_buffer_copy (buffer);
return gst_pad_activate_pull (efence->sinkpad, active);
}
+#if 0
static GstFlowReturn
gst_efence_buffer_alloc (GstPad * pad, guint64 offset,
guint size, GstCaps * caps, GstBuffer ** buf)
return GST_FLOW_OK;
}
+#endif
static void
gst_efence_set_property (GObject * object, guint prop_id,
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
-static GstBufferClass *fenced_buffer_parent_class = NULL;
-
static void
-gst_fenced_buffer_finalize (GstFencedBuffer * buffer)
+gst_fenced_buffer_dispose (GstBuffer * buffer)
{
- GstFencedBuffer *fenced_buffer;
+ GstMetaFenced *meta;
- GST_DEBUG ("free buffer=%p", buffer);
+ meta = GST_META_FENCED_GET (buffer);
- fenced_buffer = GST_FENCED_BUFFER (buffer);
+ GST_DEBUG ("free buffer=%p", buffer);
/* free our data */
if (GST_BUFFER_DATA (buffer)) {
- GST_DEBUG ("free region %p %d", fenced_buffer->region,
- fenced_buffer->length);
- munmap (fenced_buffer->region, fenced_buffer->length);
+ GST_DEBUG ("free region %p %d", meta->region, meta->length);
+ munmap (meta->region, meta->length);
}
-
- GST_MINI_OBJECT_CLASS (fenced_buffer_parent_class)->finalize (GST_MINI_OBJECT
- (buffer));
}
-static GstFencedBuffer *
+static GstBuffer *
gst_fenced_buffer_copy (const GstBuffer * buffer)
{
GstBuffer *copy;
g_return_val_if_fail (buffer != NULL, NULL);
/* create a fresh new buffer */
- copy = (GstBuffer *) gst_mini_object_new (GST_TYPE_FENCED_BUFFER);
+ copy = gst_buffer_new ();
/* we simply copy everything from our parent */
- ptr = gst_fenced_buffer_alloc (GST_BUFFER (copy),
- GST_BUFFER_SIZE (buffer), TRUE);
+ ptr = gst_fenced_buffer_alloc (copy, GST_BUFFER_SIZE (buffer), TRUE);
memcpy (ptr, GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
/* copy relevant flags */
", caps: %" GST_PTR_FORMAT, buffer,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (copy)), GST_BUFFER_CAPS (copy));
- return GST_FENCED_BUFFER (copy);
+ return copy;
}
void *
{
int alloc_size;
void *region;
- GstFencedBuffer *fenced_buffer = (GstFencedBuffer *) buffer;
+ GstMetaFenced *meta;
int page_size;
GST_DEBUG ("buffer=%p length=%d fence_top=%d", buffer, length, fence_top);
g_warning ("mmap failed");
return NULL;
}
+
+ GST_MINI_OBJECT_CAST (buffer)->dispose =
+ (GstMiniObjectDisposeFunction) gst_fenced_buffer_dispose;
+ GST_MINI_OBJECT_CAST (buffer)->copy =
+ (GstMiniObjectCopyFunction) gst_fenced_buffer_copy;
+
+ meta = GST_META_FENCED_ADD (buffer);
+
#if 0
munmap (region, page_size);
munmap (region + alloc_size - page_size, page_size);
- fenced_buffer->region = region + page_size;
- fenced_buffer->length = alloc_size - page_size;
+ meta->region = region + page_size;
+ meta->length = alloc_size - page_size;
#else
mprotect (region, page_size, PROT_NONE);
mprotect ((char *) region + alloc_size - page_size, page_size, PROT_NONE);
- fenced_buffer->region = region;
- fenced_buffer->length = alloc_size;
+ meta->region = region;
+ meta->length = alloc_size;
#endif
- GST_DEBUG ("new region %p %d", fenced_buffer->region, fenced_buffer->length);
+ GST_DEBUG ("new region %p %d", meta->region, meta->length);
if (fence_top) {
int offset;
return (void *) ((char *) region + page_size);
}
}
-
-static void
-gst_fenced_buffer_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
- fenced_buffer_parent_class = g_type_class_peek_parent (g_class);
-
- mini_object_class->finalize =
- (GstMiniObjectFinalizeFunction) gst_fenced_buffer_finalize;
- mini_object_class->copy = (GstMiniObjectCopyFunction) gst_fenced_buffer_copy;
-}
-
-GType
-gst_fenced_buffer_get_type (void)
-{
- static GType fenced_buf_type = 0;
-
- if (G_UNLIKELY (!fenced_buf_type)) {
- static const GTypeInfo fenced_buf_info = {
- sizeof (GstBufferClass),
- NULL,
- NULL,
- (GClassInitFunc) gst_fenced_buffer_class_init,
- NULL,
- NULL,
- sizeof (GstFencedBuffer),
- 0,
- NULL,
- };
-
- fenced_buf_type = g_type_register_static (GST_TYPE_BUFFER,
- "GstFencedBuffer", &fenced_buf_info, 0);
- }
- return fenced_buf_type;
-}
/* class initialization */
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_caps_debug_debug, "capsdebug", 0, \
- "debug category for capsdebug element");
-
-GST_BOILERPLATE_FULL (GstCapsDebug, gst_caps_debug, GstElement,
- GST_TYPE_ELEMENT, DEBUG_INIT);
-
-static void
-gst_caps_debug_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_caps_debug_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_caps_debug_sink_template));
-
- gst_element_class_set_details_simple (element_class, "Caps debug",
- "Generic", "Debug caps negotiation", "David Schleef <ds@schleef.org>");
-}
+#define gst_caps_debug_parent_class parent_class
+G_DEFINE_TYPE (GstCapsDebug, gst_caps_debug, GST_TYPE_ELEMENT);
static void
gst_caps_debug_class_init (GstCapsDebugClass * klass)
gobject_class->finalize = gst_caps_debug_finalize;
element_class->change_state = GST_DEBUG_FUNCPTR (gst_caps_debug_change_state);
+ GST_DEBUG_CATEGORY_INIT (gst_caps_debug_debug, "capsdebug", 0,
+ "debug category for capsdebug element");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_caps_debug_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_caps_debug_sink_template));
+
+ gst_element_class_set_details_simple (element_class, "Caps debug",
+ "Generic", "Debug caps negotiation", "David Schleef <ds@schleef.org>");
}
static void
-gst_caps_debug_init (GstCapsDebug * capsdebug,
- GstCapsDebugClass * capsdebug_class)
+gst_caps_debug_init (GstCapsDebug * capsdebug)
{
capsdebug->srcpad =
static void gst_caps_setter_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstCapsSetter, gst_caps_setter, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
-
-static void
-gst_caps_setter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "CapsSetter",
- "Generic",
- "Set/merge caps on stream",
- "Mark Nauwelaerts <mnauw@users.sourceforge.net>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_caps_setter_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_caps_setter_src_template));
-}
+#define gst_caps_setter_parent_class parent_class
+G_DEFINE_TYPE (GstCapsSetter, gst_caps_setter, GST_TYPE_BASE_TRANSFORM);
static void
gst_caps_setter_class_init (GstCapsSetterClass * g_class)
{
GObjectClass *gobject_class = (GObjectClass *) g_class;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) g_class;
GST_DEBUG_CATEGORY_INIT (caps_setter_debug, "capssetter", 0, "capssetter");
"Drop fields of incoming caps", DEFAULT_REPLACE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (element_class, "CapsSetter",
+ "Generic",
+ "Set/merge caps on stream",
+ "Mark Nauwelaerts <mnauw@users.sourceforge.net>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_caps_setter_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_caps_setter_src_template));
+
trans_class->transform_size =
GST_DEBUG_FUNCPTR (gst_caps_setter_transform_size);
trans_class->transform_caps =
}
static void
-gst_caps_setter_init (GstCapsSetter * filter, GstCapsSetterClass * g_class)
+gst_caps_setter_init (GstCapsSetter * filter)
{
filter->caps = gst_caps_new_any ();
filter->join = DEFAULT_JOIN;
GValue * value, GParamSpec * pspec);
GType gst_navseek_get_type (void);
-GST_BOILERPLATE (GstNavSeek, gst_navseek, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
-
-static void
-gst_navseek_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&navseek_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&navseek_src_template));
-
- gst_element_class_set_details_simple (element_class,
- "Seek based on left-right arrows", "Filter/Video",
- "Seek based on navigation keys left-right",
- "Jan Schmidt <thaytan@mad.scientist.com>");
-}
+#define gst_navseek_parent_class parent_class
+G_DEFINE_TYPE (GstNavSeek, gst_navseek, GST_TYPE_BASE_TRANSFORM);
static void
gst_navseek_class_init (GstNavSeekClass * klass)
{
GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *element_class;
GObjectClass *gobject_class;
gobject_class = G_OBJECT_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (klass);
gobject_class->set_property = gst_navseek_set_property;
"Time in seconds to seek by", 0.0, G_MAXDOUBLE, 5.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&navseek_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&navseek_src_template));
+
+ gst_element_class_set_details_simple (element_class,
+ "Seek based on left-right arrows", "Filter/Video",
+ "Seek based on navigation keys left-right",
+ "Jan Schmidt <thaytan@mad.scientist.com>");
+
gstbasetrans_class->event = GST_DEBUG_FUNCPTR (gst_navseek_event);
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_navseek_transform_ip);
}
static void
-gst_navseek_init (GstNavSeek * navseek, GstNavSeekClass * g_class)
+gst_navseek_init (GstNavSeek * navseek)
{
gst_pad_set_event_function (GST_BASE_TRANSFORM (navseek)->srcpad,
GST_DEBUG_FUNCPTR (gst_navseek_handle_src_event));
static void gst_push_file_src_uri_handler_init (gpointer g_iface,
gpointer iface_data);
-static void gst_file_push_src_add_uri_handler (GType type);
-GST_BOILERPLATE_FULL (GstPushFileSrc, gst_push_file_src, GstBin, GST_TYPE_BIN,
- gst_file_push_src_add_uri_handler);
-
-static void
-gst_file_push_src_add_uri_handler (GType type)
-{
- static const GInterfaceInfo info = {
- gst_push_file_src_uri_handler_init,
- NULL,
- NULL
- };
-
- g_type_add_interface_static (type, GST_TYPE_URI_HANDLER, &info);
- GST_DEBUG_CATEGORY_INIT (pushfilesrc_debug, "pushfilesrc", 0,
- "pushfilesrc element");
-}
-
-static void
-gst_push_file_src_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&srctemplate));
-
- gst_element_class_set_details_simple (element_class, "Push File Source",
- "Testing",
- "Implements pushfile:// URI-handler for push-based file access",
- "Tim-Philipp Müller <tim centricular net>");
-}
+#define gst_push_file_src_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPushFileSrc, gst_push_file_src, GST_TYPE_BIN,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER,
+ gst_push_file_src_uri_handler_init));
static void
gst_push_file_src_dispose (GObject * obj)
gst_push_file_src_class_init (GstPushFileSrcClass * g_class)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
gobject_class = G_OBJECT_CLASS (g_class);
+ element_class = GST_ELEMENT_CLASS (g_class);
+
+ GST_DEBUG_CATEGORY_INIT (pushfilesrc_debug, "pushfilesrc", 0,
+ "pushfilesrc element");
gobject_class->dispose = gst_push_file_src_dispose;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&srctemplate));
+
+ gst_element_class_set_details_simple (element_class, "Push File Source",
+ "Testing",
+ "Implements pushfile:// URI-handler for push-based file access",
+ "Tim-Philipp Müller <tim centricular net>");
}
static gboolean
}
static void
-gst_push_file_src_init (GstPushFileSrc * src, GstPushFileSrcClass * g_class)
+gst_push_file_src_init (GstPushFileSrc * src)
{
src->filesrc = gst_element_factory_make ("filesrc", "real-filesrc");
if (src->filesrc) {
};
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_tag_inject_debug, "taginject", 0, "tag inject element");
-
-GST_BOILERPLATE_FULL (GstTagInject, gst_tag_inject, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);
+#define gst_tag_inject_parent_class parent_class
+G_DEFINE_TYPE (GstTagInject, gst_tag_inject, GST_TYPE_BASE_TRANSFORM);
static void gst_tag_inject_finalize (GObject * object);
static void gst_tag_inject_set_property (GObject * object, guint prop_id,
static void
-gst_tag_inject_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (gstelement_class,
- "TagInject",
- "Generic", "inject metadata tags", "Stefan Kost <ensonic@users.sf.net>");
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&srctemplate));
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&sinktemplate));
-}
-
-static void
gst_tag_inject_finalize (GObject * object)
{
GstTagInject *self = GST_TAG_INJECT (object);
gst_tag_inject_class_init (GstTagInjectClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseTransformClass *gstbasetrans_class;
gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (gst_tag_inject_debug, "taginject", 0,
+ "tag inject element");
+
gobject_class->set_property = gst_tag_inject_set_property;
gobject_class->get_property = gst_tag_inject_get_property;
gobject_class->finalize = gst_tag_inject_finalize;
+ gst_element_class_set_details_simple (gstelement_class,
+ "TagInject",
+ "Generic", "inject metadata tags", "Stefan Kost <ensonic@users.sf.net>");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&srctemplate));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sinktemplate));
+
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_tag_inject_transform_ip);
}
static void
-gst_tag_inject_init (GstTagInject * self, GstTagInjectClass * g_class)
+gst_tag_inject_init (GstTagInject * self)
{
self->tags = NULL;
}
static gboolean gst_progress_report_start (GstBaseTransform * trans);
static gboolean gst_progress_report_stop (GstBaseTransform * trans);
-GST_BOILERPLATE (GstProgressReport, gst_progress_report, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
-
-static void
-gst_progress_report_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&progress_report_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&progress_report_src_template));
-
- gst_element_class_set_details_simple (element_class, "Progress report",
- "Testing",
- "Periodically query and report on processing progress",
- "Jan Schmidt <thaytan@mad.scientist.com>");
-}
+#define gst_progress_report_parent_class parent_class
+G_DEFINE_TYPE (GstProgressReport, gst_progress_report, GST_TYPE_BASE_TRANSFORM);
static void
gst_progress_report_finalize (GObject * obj)
gst_progress_report_class_init (GstProgressReportClass * g_class)
{
GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *element_class;
GObjectClass *gobject_class;
gobject_class = G_OBJECT_CLASS (g_class);
+ element_class = GST_ELEMENT_CLASS (g_class);
gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (g_class);
gobject_class->finalize = gst_progress_report_finalize;
"Format to use for the querying", DEFAULT_FORMAT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&progress_report_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&progress_report_src_template));
+
+ gst_element_class_set_details_simple (element_class, "Progress report",
+ "Testing",
+ "Periodically query and report on processing progress",
+ "Jan Schmidt <thaytan@mad.scientist.com>");
+
gstbasetrans_class->event = GST_DEBUG_FUNCPTR (gst_progress_report_event);
gstbasetrans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_progress_report_transform_ip);
}
static void
-gst_progress_report_init (GstProgressReport * report,
- GstProgressReportClass * g_class)
+gst_progress_report_init (GstProgressReport * report)
{
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (report), TRUE);
static GstStateChangeReturn gst_rnd_buffer_size_change_state (GstElement *
element, GstStateChange transition);
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_rnd_buffer_size_debug, "rndbuffersize", 0, \
- "rndbuffersize element");
-
GType gst_rnd_buffer_size_get_type (void);
-GST_BOILERPLATE_FULL (GstRndBufferSize, gst_rnd_buffer_size, GstElement,
- GST_TYPE_ELEMENT, DEBUG_INIT);
-
+#define gst_rnd_buffer_size_parent_class parent_class
+G_DEFINE_TYPE (GstRndBufferSize, gst_rnd_buffer_size, GST_TYPE_ELEMENT);
static void
-gst_rnd_buffer_size_base_init (gpointer g_class)
+gst_rnd_buffer_size_class_init (GstRndBufferSizeClass * klass)
{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_rnd_buffer_size_debug, "rndbuffersize", 0,
+ "rndbuffersize element");
+
+ gobject_class->set_property = gst_rnd_buffer_size_set_property;
+ gobject_class->get_property = gst_rnd_buffer_size_get_property;
+ gobject_class->finalize = gst_rnd_buffer_size_finalize;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&sink_template));
gst_element_class_set_details_simple (gstelement_class, "Random buffer size",
"Testing", "pull random sized buffers",
"Stefan Kost <stefan.kost@nokia.com>");
-}
-
-
-static void
-gst_rnd_buffer_size_class_init (GstRndBufferSizeClass * klass)
-{
- GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
-
- gobject_class->set_property = gst_rnd_buffer_size_set_property;
- gobject_class->get_property = gst_rnd_buffer_size_get_property;
- gobject_class->finalize = gst_rnd_buffer_size_finalize;
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_change_state);
}
static void
-gst_rnd_buffer_size_init (GstRndBufferSize * self,
- GstRndBufferSizeClass * g_class)
+gst_rnd_buffer_size_init (GstRndBufferSize * self)
{
self->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
gst_pad_set_activate_function (self->sinkpad,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_test_debug, "testsink", 0, \
- "debugging category for testsink element");
-
GType gst_test_get_type (void);
-GST_BOILERPLATE_FULL (GstTest, gst_test, GstBaseSink, GST_TYPE_BASE_SINK,
- DEBUG_INIT);
-
-
-static void
-gst_test_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
+#define gst_test_parent_class parent_class
+G_DEFINE_TYPE (GstTest, gst_test, GST_TYPE_BASE_SINK);
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&sinktemplate));
-
- gst_element_class_set_details_simple (gstelement_class, "Test plugin",
- "Testing", "perform a number of tests", "Benjamin Otte <otte@gnome>");
-}
static void
gst_test_class_init (GstTestClass * klass)
{
GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GObjectClass *object_class = G_OBJECT_CLASS (klass);
guint i;
+ GST_DEBUG_CATEGORY_INIT (gst_test_debug, "testsink", 0,
+ "debugging category for testsink element");
+
object_class->set_property = gst_test_set_property;
object_class->get_property = gst_test_get_property;
g_object_class_install_property (object_class, 2 * i + 2, spec);
}
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sinktemplate));
+
+ gst_element_class_set_details_simple (gstelement_class, "Test plugin",
+ "Testing", "perform a number of tests", "Benjamin Otte <otte@gnome>");
+
basesink_class->render = GST_DEBUG_FUNCPTR (gst_test_render_buffer);
basesink_class->event = GST_DEBUG_FUNCPTR (gst_test_sink_event);
basesink_class->start = GST_DEBUG_FUNCPTR (gst_test_start);
}
static void
-gst_test_init (GstTest * test, GstTestClass * g_class)
+gst_test_init (GstTest * test)
{
GstTestClass *klass;
guint i;
#include "gstedge.h"
-#include <gst/video/video.h>
-
-GST_BOILERPLATE (GstEdgeTV, gst_edgetv, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
+#define gst_edgetv_parent_class parent_class
+G_DEFINE_TYPE (GstEdgeTV, gst_edgetv, GST_TYPE_VIDEO_FILTER);
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
-#define CAPS_STR GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_RGBx
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
#else
-#define CAPS_STR GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_xRGB
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
#endif
static GstStaticPadTemplate gst_edgetv_src_template =
GstCaps * outcaps)
{
GstEdgeTV *edgetv = GST_EDGETV (btrans);
- GstStructure *structure;
- gboolean ret = FALSE;
+ GstVideoInfo info;
+ guint map_size;
+ gint width, height;
- structure = gst_caps_get_structure (incaps, 0);
+ if (!gst_video_info_from_caps (&info, incaps))
+ goto invalid_caps;
- GST_OBJECT_LOCK (edgetv);
- if (gst_structure_get_int (structure, "width", &edgetv->width) &&
- gst_structure_get_int (structure, "height", &edgetv->height)) {
- guint map_size;
+ edgetv->info = info;
- edgetv->map_width = edgetv->width / 4;
- edgetv->map_height = edgetv->height / 4;
- edgetv->video_width_margin = edgetv->width % 4;
+ width = GST_VIDEO_INFO_WIDTH (&info);
+ height = GST_VIDEO_INFO_HEIGHT (&info);
- map_size = edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2;
+ edgetv->map_width = width / 4;
+ edgetv->map_height = height / 4;
+ edgetv->video_width_margin = width % 4;
- g_free (edgetv->map);
- edgetv->map = (guint32 *) g_malloc0 (map_size);
- ret = TRUE;
- }
- GST_OBJECT_UNLOCK (edgetv);
+ map_size = edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2;
- return ret;
+ g_free (edgetv->map);
+ edgetv->map = (guint32 *) g_malloc0 (map_size);
+
+ return TRUE;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_DEBUG_OBJECT (btrans, "could not parse caps");
+ return FALSE;
+ }
}
static GstFlowReturn
gint video_width_margin;
guint32 *map;
GstFlowReturn ret = GST_FLOW_OK;
+ GstVideoFrame in_frame, out_frame;
- src = (guint32 *) GST_BUFFER_DATA (in);
- dest = (guint32 *) GST_BUFFER_DATA (out);
-
- GST_OBJECT_LOCK (filter);
map = filter->map;
- width = filter->width;
map_height = filter->map_height;
map_width = filter->map_width;
video_width_margin = filter->video_width_margin;
+
+ gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ);
+ gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE);
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (&in_frame);
+
src += width * 4 + 4;
dest += width * 4 + 4;
src += width * 3 + 8 + video_width_margin;
dest += width * 3 + 8 + video_width_margin;
}
- GST_OBJECT_UNLOCK (filter);
+
+ /* release the frame mappings taken at the top of this function */
+ gst_video_frame_unmap (&in_frame);
+ gst_video_frame_unmap (&out_frame);
+
 return ret;
}
}
static void
-gst_edgetv_base_init (gpointer g_class)
+gst_edgetv_class_init (GstEdgeTVClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+
+ gobject_class->finalize = gst_edgetv_finalize;
- gst_element_class_set_details_simple (element_class, "EdgeTV effect",
+ gst_element_class_set_details_simple (gstelement_class, "EdgeTV effect",
"Filter/Effect/Video",
"Apply edge detect on video", "Wim Taymans <wim.taymans@chello.be>");
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_edgetv_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_edgetv_src_template));
-}
-
-static void
-gst_edgetv_class_init (GstEdgeTVClass * klass)
-{
- GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
- GObjectClass *gobject_class = (GObjectClass *) klass;
-
- gobject_class->finalize = gst_edgetv_finalize;
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_edgetv_set_caps);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_edgetv_transform);
}
static void
-gst_edgetv_init (GstEdgeTV * edgetv, GstEdgeTVClass * klass)
+gst_edgetv_init (GstEdgeTV * edgetv)
{
}
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
G_BEGIN_DECLS
GstVideoFilter videofilter;
/* < private > */
-
- gint width, height;
+ GstVideoInfo info;
gint map_width, map_height;
guint32 *map;
gint video_width_margin;
" rate=(int)[1000,MAX]," \
" channels=(int)[1,MAX]"
-static void
-_do_init (GType object_type)
-{
- const GInterfaceInfo child_proxy_interface_info = {
- (GInterfaceInitFunc) gst_iir_equalizer_child_proxy_interface_init,
- NULL, /* interface_finalize */
- NULL /* interface_data */
- };
-
- g_type_add_interface_static (object_type, GST_TYPE_CHILD_PROXY,
- &child_proxy_interface_info);
-}
+#define gst_iir_equalizer_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstIirEqualizer, gst_iir_equalizer,
+ GST_TYPE_AUDIO_FILTER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_CHILD_PROXY,
+ gst_iir_equalizer_child_proxy_interface_init));
-GST_BOILERPLATE_FULL (GstIirEqualizer, gst_iir_equalizer,
- GstAudioFilter, GST_TYPE_AUDIO_FILTER, _do_init);
/* child object */
/* equalizer implementation */
static void
-gst_iir_equalizer_base_init (gpointer g_class)
-{
- GstAudioFilterClass *audiofilter_class = GST_AUDIO_FILTER_CLASS (g_class);
- GstCaps *caps;
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (audiofilter_class, caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_iir_equalizer_class_init (GstIirEqualizerClass * klass)
{
GstAudioFilterClass *audio_filter_class = (GstAudioFilterClass *) klass;
GstBaseTransformClass *btrans_class = (GstBaseTransformClass *) klass;
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstCaps *caps;
gobject_class->finalize = gst_iir_equalizer_finalize;
audio_filter_class->setup = gst_iir_equalizer_setup;
btrans_class->transform_ip = gst_iir_equalizer_transform_ip;
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (audio_filter_class, caps);
+ gst_caps_unref (caps);
}
static void
-gst_iir_equalizer_init (GstIirEqualizer * eq, GstIirEqualizerClass * g_class)
+gst_iir_equalizer_init (GstIirEqualizer * eq)
{
eq->bands_lock = g_mutex_new ();
eq->need_new_coefficients = TRUE;
GstAudioFilter *filter = GST_AUDIO_FILTER (btrans);
GstIirEqualizer *equ = GST_IIR_EQUALIZER (btrans);
GstClockTime timestamp;
+ guint8 *data;
+ gsize size;
if (G_UNLIKELY (filter->format.channels < 1 || equ->process == NULL))
return GST_FLOW_NOT_NEGOTIATED;
if (GST_CLOCK_TIME_IS_VALID (timestamp))
gst_object_sync_values (G_OBJECT (equ), timestamp);
- equ->process (equ, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf),
- filter->format.channels);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ equ->process (equ, data, size, filter->format.channels);
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
#define GST_CAT_DEFAULT equalizer_debug
-static void
-_do_init (GType object_type)
-{
- const GInterfaceInfo preset_interface_info = {
- NULL, /* interface_init */
- NULL, /* interface_finalize */
- NULL /* interface_data */
- };
-
- g_type_add_interface_static (object_type, GST_TYPE_PRESET,
- &preset_interface_info);
-}
-
-GST_BOILERPLATE_FULL (GstIirEqualizer10Bands, gst_iir_equalizer_10bands,
- GstIirEqualizer, GST_TYPE_IIR_EQUALIZER, _do_init);
+#define gst_iir_equalizer_10bands_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstIirEqualizer10Bands, gst_iir_equalizer_10bands,
+ GST_TYPE_IIR_EQUALIZER, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL));
/* equalizer implementation */
static void
-gst_iir_equalizer_10bands_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "10 Band Equalizer",
- "Filter/Effect/Audio",
- "Direct Form 10 band IIR equalizer",
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_iir_equalizer_10bands_class_init (GstIirEqualizer10BandsClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
gobject_class->set_property = gst_iir_equalizer_10bands_set_property;
gobject_class->get_property = gst_iir_equalizer_10bands_get_property;
"gain for the frequency band 15011 Hz, ranging from -24 dB to +12 dB",
-24.0, 12.0, 0.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+ gst_element_class_set_details_simple (gstelement_class, "10 Band Equalizer",
+ "Filter/Effect/Audio",
+ "Direct Form 10 band IIR equalizer",
+ "Stefan Kost <ensonic@users.sf.net>");
}
static void
-gst_iir_equalizer_10bands_init (GstIirEqualizer10Bands * equ_n,
- GstIirEqualizer10BandsClass * g_class)
+gst_iir_equalizer_10bands_init (GstIirEqualizer10Bands * equ_n)
{
GstIirEqualizer *equ = GST_IIR_EQUALIZER (equ_n);
GST_DEBUG_CATEGORY_EXTERN (equalizer_debug);
#define GST_CAT_DEFAULT equalizer_debug
-
-static void
-_do_init (GType object_type)
-{
- const GInterfaceInfo preset_interface_info = {
- NULL, /* interface_init */
- NULL, /* interface_finalize */
- NULL /* interface_data */
- };
-
- g_type_add_interface_static (object_type, GST_TYPE_PRESET,
- &preset_interface_info);
-}
-
-GST_BOILERPLATE_FULL (GstIirEqualizer3Bands, gst_iir_equalizer_3bands,
- GstIirEqualizer, GST_TYPE_IIR_EQUALIZER, _do_init);
+#define gst_iir_equalizer_3bands_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstIirEqualizer3Bands, gst_iir_equalizer_3bands,
+ GST_TYPE_IIR_EQUALIZER, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL));
/* equalizer implementation */
static void
-gst_iir_equalizer_3bands_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "3 Band Equalizer",
- "Filter/Effect/Audio",
- "Direct Form 3 band IIR equalizer", "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_iir_equalizer_3bands_class_init (GstIirEqualizer3BandsClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
gobject_class->set_property = gst_iir_equalizer_3bands_set_property;
gobject_class->get_property = gst_iir_equalizer_3bands_get_property;
"gain for the frequency band 11 kHz, ranging from -24.0 to +12.0",
-24.0, 12.0, 0.0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+ gst_element_class_set_details_simple (gstelement_class, "3 Band Equalizer",
+ "Filter/Effect/Audio",
+ "Direct Form 3 band IIR equalizer", "Stefan Kost <ensonic@users.sf.net>");
}
static void
-gst_iir_equalizer_3bands_init (GstIirEqualizer3Bands * equ_n,
- GstIirEqualizer3BandsClass * g_class)
+gst_iir_equalizer_3bands_init (GstIirEqualizer3Bands * equ_n)
{
GstIirEqualizer *equ = GST_IIR_EQUALIZER (equ_n);
GST_DEBUG_CATEGORY_EXTERN (equalizer_debug);
#define GST_CAT_DEFAULT equalizer_debug
-GST_BOILERPLATE (GstIirEqualizerNBands, gst_iir_equalizer_nbands,
- GstIirEqualizer, GST_TYPE_IIR_EQUALIZER);
+#define gst_iir_equalizer_nbands_parent_class parent_class
+G_DEFINE_TYPE (GstIirEqualizerNBands, gst_iir_equalizer_nbands,
+ GST_TYPE_IIR_EQUALIZER);
/* equalizer implementation */
static void
-gst_iir_equalizer_nbands_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "N Band Equalizer",
- "Filter/Effect/Audio",
- "Direct Form IIR equalizer",
- "Benjamin Otte <otte@gnome.org>," " Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_iir_equalizer_nbands_class_init (GstIirEqualizerNBandsClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
gobject_class->set_property = gst_iir_equalizer_nbands_set_property;
gobject_class->get_property = gst_iir_equalizer_nbands_get_property;
g_param_spec_uint ("num-bands", "num-bands",
"number of different bands to use", 1, 64, 10,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT));
+
+ gst_element_class_set_details_simple (gstelement_class, "N Band Equalizer",
+ "Filter/Effect/Audio",
+ "Direct Form IIR equalizer",
+ "Benjamin Otte <otte@gnome.org>," " Stefan Kost <ensonic@users.sf.net>");
}
static void
-gst_iir_equalizer_nbands_init (GstIirEqualizerNBands * equ_n,
- GstIirEqualizerNBandsClass * g_class)
+gst_iir_equalizer_nbands_init (GstIirEqualizerNBands * equ_n)
{
GstIirEqualizer *equ = GST_IIR_EQUALIZER (equ_n);
GST_DEBUG_CATEGORY_STATIC (flvdemux_debug);
#define GST_CAT_DEFAULT flvdemux_debug
-GST_BOILERPLATE (GstFlvDemux, gst_flv_demux, GstElement, GST_TYPE_ELEMENT);
+#define gst_flv_demux_parent_class parent_class
+G_DEFINE_TYPE (GstFlvDemux, gst_flv_demux, GST_TYPE_ELEMENT);
/* 9 bytes of header + 4 bytes of first previous tag size */
#define FLV_HEADER_SIZE 13
gboolean key;
gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
- key = ! !(GST_INDEX_ASSOC_FLAGS (entry) & GST_ASSOCIATION_FLAG_KEY_UNIT);
+ key = !!(GST_INDEX_ASSOC_FLAGS (entry) & GST_ASSOCIATION_FLAG_KEY_UNIT);
GST_LOG_OBJECT (demux, "position already mapped to time %" GST_TIME_FORMAT
", keyframe %d", GST_TIME_ARGS (time), key);
/* there is not really a way to delete the existing one */
- if (time != ts || key != ! !keyframe)
+ if (time != ts || key != !!keyframe)
GST_DEBUG_OBJECT (demux, "metadata mismatch");
#endif
return;
gst_flv_demux_parse_tag_script (GstFlvDemux * demux, GstBuffer * buffer)
{
GstFlowReturn ret = GST_FLOW_OK;
- GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (buffer);
+ GstByteReader reader;
guint8 type = 0;
+ guint8 *data;
+ gsize size;
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) >= 7, GST_FLOW_ERROR);
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 7, GST_FLOW_ERROR);
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+ gst_byte_reader_init (&reader, data, size);
gst_byte_reader_skip (&reader, 7);
GST_LOG_OBJECT (demux, "parsing a script tag");
if (!gst_byte_reader_get_uint8 (&reader, &type))
- return GST_FLOW_OK;
+ goto cleanup;
/* Must be string */
if (type == 2) {
if (!gst_byte_reader_get_uint8 (&reader, &type)) {
g_free (function_name);
- return GST_FLOW_OK;
+ goto cleanup;
}
switch (type) {
/* ECMA array */
if (!gst_byte_reader_get_uint32_be (&reader, &nb_elems)) {
g_free (function_name);
- return GST_FLOW_OK;
+ goto cleanup;
}
/* The number of elements is just a hint, some files have
default:
GST_DEBUG_OBJECT (demux, "Unhandled script data type : %d", type);
g_free (function_name);
- return GST_FLOW_OK;
+ goto cleanup;
}
demux->push_tags = TRUE;
}
}
+cleanup:
+ gst_buffer_unmap (buffer, data, -1);
+
return ret;
}
break;
case 10:
{
+ guint8 *data = NULL;
+ gsize size;
+
+ if (demux->audio_codec_data)
+ data = gst_buffer_map (demux->audio_codec_data, &size, NULL,
+ GST_MAP_READ);
/* use codec-data to extract and verify samplerate */
- if (demux->audio_codec_data &&
- GST_BUFFER_SIZE (demux->audio_codec_data) >= 2) {
+ if (demux->audio_codec_data && size >= 2) {
gint freq_index;
- freq_index =
- ((GST_READ_UINT16_BE (GST_BUFFER_DATA (demux->audio_codec_data))));
+ freq_index = GST_READ_UINT16_BE (data);
freq_index = (freq_index & 0x0780) >> 7;
adjusted_rate =
gst_codec_utils_aac_get_sample_rate_from_index (freq_index);
adjusted_rate = rate;
}
}
+ if (data)
+ gst_buffer_unmap (demux->audio_codec_data, data, -1);
caps = gst_caps_new_simple ("audio/mpeg",
"mpegversion", G_TYPE_INT, 4, "framed", G_TYPE_BOOLEAN, TRUE,
"stream-format", G_TYPE_STRING, "raw", NULL);
guint32 pts = 0, codec_tag = 0, rate = 5512, width = 8, channels = 1;
guint32 codec_data = 0, pts_ext = 0;
guint8 flags = 0;
- guint8 *data = GST_BUFFER_DATA (buffer);
+ guint8 *data;
GstBuffer *outbuf;
+ gsize size;
GST_LOG_OBJECT (demux, "parsing an audio tag");
if (demux->no_more_pads && !demux->audio_pad) {
GST_WARNING_OBJECT (demux,
"Signaled no-more-pads already but had no audio pad -- ignoring");
- goto beach;
+ return GST_FLOW_OK;
}
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) == demux->tag_size,
+ g_return_val_if_fail (gst_buffer_get_size (buffer) == demux->tag_size,
GST_FLOW_ERROR);
+ /* Error out on tags with too small headers */
+ if (gst_buffer_get_size (buffer) < 11) {
+ GST_ERROR_OBJECT (demux, "Too small tag size (%d)",
+ gst_buffer_get_size (buffer));
+ return GST_FLOW_ERROR;
+ }
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
/* Grab information about audio tag */
pts = GST_READ_UINT24_BE (data);
/* read the pts extension to 32 bits integer */
GST_LOG_OBJECT (demux, "pts bytes %02X %02X %02X %02X (%d)", data[0], data[1],
data[2], data[3], pts);
- /* Error out on tags with too small headers */
- if (GST_BUFFER_SIZE (buffer) < 11) {
- GST_ERROR_OBJECT (demux, "Too small tag size (%d)",
- GST_BUFFER_SIZE (buffer));
- return GST_FLOW_ERROR;
- }
-
- /* Silently skip buffers with no data */
- if (GST_BUFFER_SIZE (buffer) == 11)
- return GST_FLOW_OK;
-
/* Skip the stream id and go directly to the flags */
flags = GST_READ_UINT8 (data + 7);
+ /* Silently skip buffers with no data */
+ if (size == 11)
+ goto beach;
+
/* Channels */
if (flags & 0x01) {
channels = 2;
ret = GST_FLOW_ERROR;
goto beach;
}
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ GstCaps *caps;
- GST_DEBUG_OBJECT (demux, "created audio pad with caps %" GST_PTR_FORMAT,
- GST_PAD_CAPS (demux->audio_pad));
+ caps = gst_pad_get_current_caps (demux->audio_pad);
+ GST_DEBUG_OBJECT (demux, "created audio pad with caps %" GST_PTR_FORMAT,
+ caps);
+ if (caps)
+ gst_caps_unref (caps);
+ }
+#endif
/* Set functions on the pad */
gst_pad_set_query_type_function (demux->audio_pad,
}
/* Create buffer from pad */
- outbuf =
- gst_buffer_create_sub (buffer, 7 + codec_data,
- demux->tag_data_size - codec_data);
+ outbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
+ 7 + codec_data, demux->tag_data_size - codec_data);
if (demux->audio_codec_tag == 10) {
guint8 aac_packet_type = GST_READ_UINT8 (data + 8);
GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET (outbuf) = demux->audio_offset++;
GST_BUFFER_OFFSET_END (outbuf) = demux->audio_offset;
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (demux->audio_pad));
if (demux->duration == GST_CLOCK_TIME_NONE ||
demux->duration < GST_BUFFER_TIMESTAMP (outbuf))
demux->audio_need_discont = FALSE;
}
- gst_segment_set_last_stop (&demux->segment, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (outbuf));
+ demux->segment.position = GST_BUFFER_TIMESTAMP (outbuf);
/* Do we need a newsegment event ? */
if (G_UNLIKELY (demux->audio_need_segment)) {
- if (demux->close_seg_event)
- gst_pad_push_event (demux->audio_pad,
- gst_event_ref (demux->close_seg_event));
-
+ /* FIXME need one segment sent for all stream to maintain a/v sync */
if (!demux->new_seg_event) {
GST_DEBUG_OBJECT (demux, "pushing newsegment from %"
GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
- GST_TIME_ARGS (demux->segment.last_stop),
+ GST_TIME_ARGS (demux->segment.position),
GST_TIME_ARGS (demux->segment.stop));
- demux->new_seg_event =
- gst_event_new_new_segment (FALSE, demux->segment.rate,
- demux->segment.format, demux->segment.last_stop,
- demux->segment.stop, demux->segment.last_stop);
+ demux->segment.start = demux->segment.time = demux->segment.position;
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
} else {
GST_DEBUG_OBJECT (demux, "pushing pre-generated newsegment event");
}
GST_LOG_OBJECT (demux, "pushing %d bytes buffer at pts %" GST_TIME_FORMAT
" with duration %" GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf));
if (!GST_CLOCK_TIME_IS_VALID (demux->audio_start)) {
ret = gst_pad_push (demux->audio_pad, outbuf);
if (G_UNLIKELY (ret != GST_FLOW_OK)) {
if (demux->segment.rate < 0.0 && ret == GST_FLOW_UNEXPECTED &&
- demux->segment.last_stop > demux->segment.stop) {
+ demux->segment.position > demux->segment.stop) {
/* In reverse playback we can get a GST_FLOW_UNEXPECTED when
* we are at the end of the segment, so we just need to jump
* back to the previous section. */
demux->audio_linked = TRUE;
beach:
+ gst_buffer_unmap (buffer, data, -1);
+
return ret;
}
guint32 pts = 0, codec_data = 1, pts_ext = 0;
gboolean keyframe = FALSE;
guint8 flags = 0, codec_tag = 0;
- guint8 *data = GST_BUFFER_DATA (buffer);
+ guint8 *data;
GstBuffer *outbuf;
+ gsize size;
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) == demux->tag_size,
+ g_return_val_if_fail (gst_buffer_get_size (buffer) == demux->tag_size,
GST_FLOW_ERROR);
GST_LOG_OBJECT (demux, "parsing a video tag");
-
if (demux->no_more_pads && !demux->video_pad) {
GST_WARNING_OBJECT (demux,
"Signaled no-more-pads already but had no audio pad -- ignoring");
- goto beach;
+ return GST_FLOW_OK;
}
+ if (gst_buffer_get_size (buffer) < 12) {
+ GST_ERROR_OBJECT (demux, "Too small tag size");
+ return GST_FLOW_ERROR;
+ }
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
/* Grab information about video tag */
pts = GST_READ_UINT24_BE (data);
/* read the pts extension to 32 bits integer */
GST_LOG_OBJECT (demux, "pts bytes %02X %02X %02X %02X (%d)", data[0], data[1],
data[2], data[3], pts);
- if (GST_BUFFER_SIZE (buffer) < 12) {
- GST_ERROR_OBJECT (demux, "Too small tag size");
- return GST_FLOW_ERROR;
- }
-
/* Skip the stream id and go directly to the flags */
flags = GST_READ_UINT8 (data + 7);
* metadata tag that would come later and trigger a caps change */
demux->got_par = FALSE;
- GST_DEBUG_OBJECT (demux, "created video pad with caps %" GST_PTR_FORMAT,
- GST_PAD_CAPS (demux->video_pad));
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ GstCaps *caps;
+
+ caps = gst_pad_get_current_caps (demux->video_pad);
+ GST_DEBUG_OBJECT (demux, "created video pad with caps %" GST_PTR_FORMAT,
+ caps);
+ if (caps)
+ gst_caps_unref (caps);
+ }
+#endif
/* Set functions on the pad */
gst_pad_set_query_type_function (demux->video_pad,
}
/* Create buffer from pad */
- outbuf =
- gst_buffer_create_sub (buffer, 7 + codec_data,
- demux->tag_data_size - codec_data);
+ outbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
+ 7 + codec_data, demux->tag_data_size - codec_data);
if (demux->video_codec_tag == 7) {
guint8 avc_packet_type = GST_READ_UINT8 (data + 8);
GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET (outbuf) = demux->video_offset++;
GST_BUFFER_OFFSET_END (outbuf) = demux->video_offset;
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (demux->video_pad));
if (demux->duration == GST_CLOCK_TIME_NONE ||
demux->duration < GST_BUFFER_TIMESTAMP (outbuf))
demux->video_need_discont = FALSE;
}
- gst_segment_set_last_stop (&demux->segment, GST_FORMAT_TIME,
- GST_BUFFER_TIMESTAMP (outbuf));
+ demux->segment.position = GST_BUFFER_TIMESTAMP (outbuf);
/* Do we need a newsegment event ? */
if (G_UNLIKELY (demux->video_need_segment)) {
- if (demux->close_seg_event)
- gst_pad_push_event (demux->video_pad,
- gst_event_ref (demux->close_seg_event));
-
+ /* FIXME need one segment sent for all stream to maintain a/v sync */
if (!demux->new_seg_event) {
GST_DEBUG_OBJECT (demux, "pushing newsegment from %"
GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
- GST_TIME_ARGS (demux->segment.last_stop),
+ GST_TIME_ARGS (demux->segment.position),
GST_TIME_ARGS (demux->segment.stop));
- demux->new_seg_event =
- gst_event_new_new_segment (FALSE, demux->segment.rate,
- demux->segment.format, demux->segment.last_stop,
- demux->segment.stop, demux->segment.last_stop);
+ demux->segment.start = demux->segment.time = demux->segment.position;
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
} else {
GST_DEBUG_OBJECT (demux, "pushing pre-generated newsegment event");
}
GST_LOG_OBJECT (demux, "pushing %d bytes buffer at pts %" GST_TIME_FORMAT
" with duration %" GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT
- ", keyframe (%d)", GST_BUFFER_SIZE (outbuf),
+ ", keyframe (%d)", gst_buffer_get_size (outbuf),
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
keyframe);
if (G_UNLIKELY (ret != GST_FLOW_OK)) {
if (demux->segment.rate < 0.0 && ret == GST_FLOW_UNEXPECTED &&
- demux->segment.last_stop > demux->segment.stop) {
+ demux->segment.position > demux->segment.stop) {
/* In reverse playback we can get a GST_FLOW_UNEXPECTED when
* we are at the end of the segment, so we just need to jump
* back to the previous section. */
demux->video_linked = TRUE;
beach:
+ gst_buffer_unmap (buffer, data, -1);
return ret;
}
guint32 tag_data_size;
guint8 type;
gboolean keyframe = TRUE;
- GstClockTime ret;
- guint8 *data = GST_BUFFER_DATA (buffer);
+ GstClockTime ret = GST_CLOCK_TIME_NONE;
+ guint8 *data, *bdata;
+ gsize size;
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 12,
+ GST_CLOCK_TIME_NONE);
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) >= 12, GST_CLOCK_TIME_NONE);
+ data = bdata = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
type = data[0];
if (type != 9 && type != 8 && type != 18) {
GST_WARNING_OBJECT (demux, "Unsupported tag type %u", data[0]);
- return GST_CLOCK_TIME_NONE;
+ goto exit;
}
if (type == 9)
tag_data_size = GST_READ_UINT24_BE (data + 1);
- if (GST_BUFFER_SIZE (buffer) >= tag_data_size + 11 + 4) {
+ if (size >= tag_data_size + 11 + 4) {
if (GST_READ_UINT32_BE (data + tag_data_size + 11) != tag_data_size + 11) {
GST_WARNING_OBJECT (demux, "Invalid tag size");
- return GST_CLOCK_TIME_NONE;
+ goto exit;
}
}
if (demux->duration == GST_CLOCK_TIME_NONE || demux->duration < ret)
demux->duration = ret;
+exit:
+ gst_buffer_unmap (buffer, bdata, -1);
return ret;
}
{
GstFlowReturn ret = GST_FLOW_OK;
guint8 tag_type = 0;
- guint8 *data = GST_BUFFER_DATA (buffer);
+ guint8 *data;
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) >= 4, GST_FLOW_ERROR);
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 4, GST_FLOW_ERROR);
+
+ data = gst_buffer_map (buffer, NULL, NULL, GST_MAP_READ);
tag_type = data[0];
GST_LOG_OBJECT (demux, "tag data size is %" G_GUINT64_FORMAT,
demux->tag_data_size);
+ gst_buffer_unmap (buffer, data, -1);
+
return ret;
}
gst_flv_demux_parse_header (GstFlvDemux * demux, GstBuffer * buffer)
{
GstFlowReturn ret = GST_FLOW_OK;
- guint8 *data = GST_BUFFER_DATA (buffer);
+ guint8 *data;
- g_return_val_if_fail (GST_BUFFER_SIZE (buffer) >= 9, GST_FLOW_ERROR);
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 9, GST_FLOW_ERROR);
+
+ data = gst_buffer_map (buffer, NULL, NULL, GST_MAP_READ);
/* Check for the FLV tag */
if (data[0] == 'F' && data[1] == 'L' && data[2] == 'V') {
}
}
- /* Jump over the 4 first bytes */
- data += 4;
-
/* Now look at audio/video flags */
{
- guint8 flags = data[0];
+ guint8 flags = data[4];
demux->has_video = demux->has_audio = FALSE;
demux->need_header = FALSE;
beach:
+ gst_buffer_unmap (buffer, data, -1);
return ret;
}
demux->new_seg_event = NULL;
}
- if (demux->close_seg_event) {
- gst_event_unref (demux->close_seg_event);
- demux->close_seg_event = NULL;
- }
-
gst_adapter_clear (demux->adapter);
if (demux->audio_codec_data) {
demux = GST_FLV_DEMUX (gst_pad_get_parent (pad));
GST_LOG_OBJECT (demux, "received buffer of %d bytes at offset %"
- G_GUINT64_FORMAT, GST_BUFFER_SIZE (buffer), GST_BUFFER_OFFSET (buffer));
+ G_GUINT64_FORMAT, gst_buffer_get_size (buffer),
+ GST_BUFFER_OFFSET (buffer));
if (G_UNLIKELY (GST_BUFFER_OFFSET (buffer) == 0)) {
GST_DEBUG_OBJECT (demux, "beginning of file, expect header");
if (!demux->indexed) {
if (demux->offset == demux->file_size - sizeof (guint32)) {
- GstBuffer *buffer =
- gst_adapter_take_buffer (demux->adapter, sizeof (guint32));
- GstByteReader *reader = gst_byte_reader_new_from_buffer (buffer);
guint64 seek_offset;
+ guint8 *data;
- if (!gst_adapter_available (demux->adapter) >= sizeof (guint32)) {
- /* error */
- }
+ data = gst_adapter_take (demux->adapter, 4);
+ if (!data)
+ goto no_index;
- seek_offset =
- demux->file_size - sizeof (guint32) -
- gst_byte_reader_peek_uint32_be_unchecked (reader);
- gst_byte_reader_free (reader);
- gst_buffer_unref (buffer);
+ seek_offset = demux->file_size - sizeof (guint32) -
+ GST_READ_UINT32_BE (data);
+ g_free (data);
GST_INFO_OBJECT (demux,
"Seeking to beginning of last tag at %" G_GUINT64_FORMAT,
return ret;
}
- if (G_UNLIKELY (*buffer && GST_BUFFER_SIZE (*buffer) != size)) {
+ if (G_UNLIKELY (*buffer && gst_buffer_get_size (*buffer) != size)) {
GST_WARNING_OBJECT (demux,
"partial pull got %d when expecting %d from offset %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (*buffer), size, offset);
+ gst_buffer_get_size (*buffer), size, offset);
gst_buffer_unref (*buffer);
ret = GST_FLOW_UNEXPECTED;
*buffer = NULL;
GstFormat fmt = GST_FORMAT_BYTES;
size_t tag_size, size;
GstBuffer *buffer = NULL;
+ guint8 *data;
if (G_UNLIKELY (!gst_pad_query_peer_duration (demux->sinkpad, &fmt, &offset)
|| fmt != GST_FORMAT_BYTES))
4, &buffer))
goto exit;
- tag_size = GST_READ_UINT32_BE (GST_BUFFER_DATA (buffer));
+ data = gst_buffer_map (buffer, NULL, NULL, GST_MAP_READ);
+ tag_size = GST_READ_UINT32_BE (data);
+ gst_buffer_unmap (buffer, data, -1);
GST_DEBUG_OBJECT (demux, "last tag size: %" G_GSIZE_FORMAT, tag_size);
gst_buffer_unref (buffer);
buffer = NULL;
goto exit;
/* a consistency check */
- size = GST_READ_UINT24_BE (GST_BUFFER_DATA (buffer) + 1);
+ data = gst_buffer_map (buffer, NULL, NULL, GST_MAP_READ);
+ size = GST_READ_UINT24_BE (data + 1);
if (size != tag_size - 11) {
+ gst_buffer_unmap (buffer, data, -1);
GST_DEBUG_OBJECT (demux,
"tag size %" G_GSIZE_FORMAT ", expected %" G_GSIZE_FORMAT
", corrupt or truncated file", size, tag_size - 11);
gst_flv_demux_parse_tag_timestamp (demux, FALSE, buffer, &size);
/* maybe get some more metadata */
- if (GST_BUFFER_DATA (buffer)[0] == 18) {
+ if (data[0] == 18) {
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
buffer = NULL;
GST_DEBUG_OBJECT (demux, "script tag, pulling it to parse");
if (GST_FLOW_OK == gst_flv_demux_pull_range (demux, demux->sinkpad, offset,
tag_size, &buffer))
gst_flv_demux_parse_tag_script (demux, buffer);
+ } else {
+ gst_buffer_unmap (buffer, data, -1);
}
exit:
if (demux->segment.rate < 0.0) {
/* check end of section */
if ((gint64) demux->offset >= demux->to_offset ||
- demux->segment.last_stop >= demux->segment.stop + 2 * GST_SECOND ||
+ demux->segment.position >= demux->segment.stop + 2 * GST_SECOND ||
(demux->audio_done && demux->video_done))
ret = gst_flv_demux_seek_to_prev_keyframe (demux);
} else {
/* check EOS condition */
if ((demux->segment.stop != -1) &&
- (demux->segment.last_stop >= demux->segment.stop)) {
+ (demux->segment.position >= demux->segment.stop)) {
ret = GST_FLOW_UNEXPECTED;
}
}
gst_pad_pause_task (pad);
if (ret == GST_FLOW_UNEXPECTED) {
+ /* handle end-of-stream/segment */
+ /* so align our position with the end of it, if there is one
+ * this ensures a subsequent will arrive at correct base/acc time */
+ if (demux->segment.rate > 0.0 &&
+ GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
+ demux->segment.position = demux->segment.stop;
+ else if (demux->segment.rate < 0.0)
+ demux->segment.position = demux->segment.start;
+
/* perform EOS logic */
if (!demux->no_more_pads) {
gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
g_return_val_if_fail (segment != NULL, 0);
- time = segment->last_stop;
+ time = segment->position;
if (demux->index) {
/* Let's check if we have an index entry for that seek time */
GST_DEBUG_OBJECT (demux, "found index entry for %" GST_TIME_FORMAT
" at %" GST_TIME_FORMAT ", seeking to %" G_GINT64_FORMAT,
- GST_TIME_ARGS (segment->last_stop), GST_TIME_ARGS (time), bytes);
+ GST_TIME_ARGS (segment->position), GST_TIME_ARGS (time), bytes);
/* Key frame seeking */
if (segment->flags & GST_SEEK_FLAG_KEY_UNIT) {
if (time < segment->start) {
segment->start = segment->time = time;
}
- segment->last_stop = time;
+ segment->position = time;
}
} else {
GST_DEBUG_OBJECT (demux, "no index entry found for %" GST_TIME_FORMAT,
if (format != GST_FORMAT_TIME)
goto wrong_format;
- flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+ flush = !!(flags & GST_SEEK_FLAG_FLUSH);
/* FIXME : the keyframe flag is never used ! */
/* Work on a copy until we are sure the seek succeeded. */
&demux->segment);
/* Apply the seek to our segment */
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
start_type, start, stop_type, stop, &update);
GST_DEBUG_OBJECT (demux, "segment configured %" GST_SEGMENT_FORMAT,
&seeksegment);
- if (flush || seeksegment.last_stop != demux->segment.last_stop) {
+ if (flush || seeksegment.position != demux->segment.position) {
/* Do the actual seeking */
guint64 offset = gst_flv_demux_find_offset (demux, &seeksegment);
demux->seeking = seeking;
GST_OBJECT_UNLOCK (demux);
- flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+ flush = !!(flags & GST_SEEK_FLAG_FLUSH);
/* FIXME : the keyframe flag is never used */
if (flush) {
if (flush) {
/* Stop flushing upstream we need to pull */
- gst_pad_push_event (demux->sinkpad, gst_event_new_flush_stop ());
+ gst_pad_push_event (demux->sinkpad, gst_event_new_flush_stop (TRUE));
}
/* Work on a copy until we are sure the seek succeeded. */
&demux->segment);
/* Apply the seek to our segment */
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
start_type, start, stop_type, stop, &update);
GST_DEBUG_OBJECT (demux, "segment configured %" GST_SEGMENT_FORMAT,
&seeksegment);
- if (flush || seeksegment.last_stop != demux->segment.last_stop) {
+ if (flush || seeksegment.position != demux->segment.position) {
/* Do the actual seeking */
/* index is reliable if it is complete or we do not go to far ahead */
if (seeking && !demux->indexed &&
- seeksegment.last_stop > demux->index_max_time + 10 * GST_SECOND) {
+ seeksegment.position > demux->index_max_time + 10 * GST_SECOND) {
GST_DEBUG_OBJECT (demux, "delaying seek to post-scan; "
" index only up to %" GST_TIME_FORMAT,
GST_TIME_ARGS (demux->index_max_time));
/* stop flushing for now */
if (flush)
- gst_flv_demux_push_src_event (demux, gst_event_new_flush_stop ());
+ gst_flv_demux_push_src_event (demux, gst_event_new_flush_stop (TRUE));
/* delegate scanning and index building to task thread to avoid
* occupying main (UI) loop */
if (demux->seek_event)
gst_event_unref (demux->seek_event);
demux->seek_event = gst_event_ref (event);
- demux->seek_time = seeksegment.last_stop;
+ demux->seek_time = seeksegment.position;
demux->state = FLV_STATE_SEEK;
/* do not know about succes yet, but we did care and handled it */
ret = TRUE;
ret = TRUE;
}
- if (G_UNLIKELY (demux->close_seg_event)) {
- gst_event_unref (demux->close_seg_event);
- demux->close_seg_event = NULL;
- }
-
if (flush) {
/* Stop flushing, the sinks are at time 0 now */
- gst_flv_demux_push_src_event (demux, gst_event_new_flush_stop ());
- } else {
- GST_DEBUG_OBJECT (demux, "closing running segment %" GST_SEGMENT_FORMAT,
- &demux->segment);
-
- /* Close the current segment for a linear playback */
- if (demux->segment.rate >= 0) {
- /* for forward playback, we played from start to last_stop */
- demux->close_seg_event = gst_event_new_new_segment (TRUE,
- demux->segment.rate, demux->segment.format,
- demux->segment.start, demux->segment.last_stop, demux->segment.time);
- } else {
- gint64 stop;
-
- if ((stop = demux->segment.stop) == -1)
- stop = demux->segment.duration;
-
- /* for reverse playback, we played from stop to last_stop. */
- demux->close_seg_event = gst_event_new_new_segment (TRUE,
- demux->segment.rate, demux->segment.format,
- demux->segment.last_stop, stop, demux->segment.last_stop);
- }
+ gst_flv_demux_push_src_event (demux, gst_event_new_flush_stop (TRUE));
}
if (ret) {
if (demux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT (demux),
gst_message_new_segment_start (GST_OBJECT (demux),
- demux->segment.format, demux->segment.last_stop));
+ demux->segment.format, demux->segment.position));
}
/* Tell all the stream a new segment is needed */
GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
GST_TIME_ARGS (demux->segment.start),
GST_TIME_ARGS (demux->segment.stop));
- demux->new_seg_event =
- gst_event_new_new_segment (FALSE, demux->segment.rate,
- demux->segment.format, demux->segment.start,
- demux->segment.stop, demux->segment.start);
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
}
}
static gboolean
gst_flv_demux_sink_activate (GstPad * sinkpad)
{
- if (gst_pad_check_pull_range (sinkpad)) {
- return gst_pad_activate_pull (sinkpad, TRUE);
- } else {
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ gst_query_parse_scheduling (query, &pull_mode, NULL, NULL, NULL, NULL, NULL);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_pull (sinkpad, TRUE);
+
+activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
return gst_pad_activate_push (sinkpad, TRUE);
}
}
GST_WARNING_OBJECT (demux, "failed pushing EOS on streams");
ret = TRUE;
break;
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate;
- gint64 start, stop, time;
- gboolean update;
+ GstSegment in_segment;
GST_DEBUG_OBJECT (demux, "received new segment");
- gst_event_parse_new_segment (event, &update, &rate, &format, &start,
- &stop, &time);
+ gst_event_copy_segment (event, &in_segment);
- if (format == GST_FORMAT_TIME) {
+ if (in_segment.format == GST_FORMAT_TIME) {
/* time segment, this is perfect, copy over the values. */
- gst_segment_set_newsegment (&demux->segment, update, rate, format,
- start, stop, time);
+ memcpy (&demux->segment, &in_segment, sizeof (in_segment));
GST_DEBUG_OBJECT (demux, "NEWSEGMENT: %" GST_SEGMENT_FORMAT,
&demux->segment);
}
GST_DEBUG_OBJECT (pad, "position query, replying %" GST_TIME_FORMAT,
- GST_TIME_ARGS (demux->segment.last_stop));
+ GST_TIME_ARGS (demux->segment.position));
- gst_query_set_position (query, GST_FORMAT_TIME, demux->segment.last_stop);
+ gst_query_set_position (query, GST_FORMAT_TIME, demux->segment.position);
break;
}
demux->new_seg_event = NULL;
}
- if (demux->close_seg_event) {
- gst_event_unref (demux->close_seg_event);
- demux->close_seg_event = NULL;
- }
-
if (demux->audio_codec_data) {
gst_buffer_unref (demux->audio_codec_data);
demux->audio_codec_data = NULL;
}
static void
-gst_flv_demux_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&flv_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&audio_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&video_src_template));
- gst_element_class_set_details_simple (element_class, "FLV Demuxer",
- "Codec/Demuxer",
- "Demux FLV feeds into digital streams",
- "Julien Moutte <julien@moutte.net>");
-}
-
-static void
gst_flv_demux_class_init (GstFlvDemuxClass * klass)
{
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GST_DEBUG_FUNCPTR (gst_flv_demux_change_state);
gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_flv_demux_set_index);
gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_flv_demux_get_index);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flv_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&audio_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&video_src_template));
+ gst_element_class_set_details_simple (gstelement_class, "FLV Demuxer",
+ "Codec/Demuxer",
+ "Demux FLV feeds into digital streams",
+ "Julien Moutte <julien@moutte.net>");
}
static void
-gst_flv_demux_init (GstFlvDemux * demux, GstFlvDemuxClass * g_class)
+gst_flv_demux_init (GstFlvDemux * demux)
{
demux->sinkpad =
gst_pad_new_from_static_template (&flv_sink_template, "sink");
GstSegment segment;
- GstEvent *close_seg_event;
GstEvent *new_seg_event;
GstTagList *taglist;
"audio/x-speex, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 };")
);
-#define _do_init(type) \
- G_STMT_START{ \
- static const GInterfaceInfo tag_setter_info = { \
- NULL, \
- NULL, \
- NULL \
- }; \
- g_type_add_interface_static (type, GST_TYPE_TAG_SETTER, \
- &tag_setter_info); \
- }G_STMT_END
-
-GST_BOILERPLATE_FULL (GstFlvMux, gst_flv_mux, GstElement, GST_TYPE_ELEMENT,
- _do_init);
+#define gst_flv_mux_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstFlvMux, gst_flv_mux, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
static void gst_flv_mux_finalize (GObject * object);
static GstFlowReturn
static gboolean gst_flv_mux_handle_src_event (GstPad * pad, GstEvent * event);
static GstPad *gst_flv_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps);
static void gst_flv_mux_release_pad (GstElement * element, GstPad * pad);
+static gboolean gst_flv_mux_video_pad_setcaps (GstPad * pad, GstCaps * caps);
+static gboolean gst_flv_mux_audio_pad_setcaps (GstPad * pad, GstCaps * caps);
+
static void gst_flv_mux_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_flv_mux_set_property (GObject * object,
g_slice_free (GstFlvMuxIndexEntry, entry);
}
-static void
-gst_flv_mux_base_init (gpointer g_class)
+static GstBuffer *
+_gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstBuffer *buf;
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&videosink_templ));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&audiosink_templ));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_templ));
- gst_element_class_set_details_simple (element_class, "FLV muxer",
- "Codec/Muxer",
- "Muxes video/audio streams into a FLV stream",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+ buf = gst_buffer_new ();
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (free_func ? 0 : GST_MEMORY_FLAG_READONLY,
+ mem, free_func, size, 0, size));
- GST_DEBUG_CATEGORY_INIT (flvmux_debug, "flvmux", 0, "FLV muxer");
+ return buf;
+}
+
+static void
+_gst_buffer_new_and_alloc (gsize size, GstBuffer ** buffer, guint8 ** data)
+{
+ g_return_if_fail (data != NULL);
+ g_return_if_fail (buffer != NULL);
+
+ *data = g_malloc (size);
+ *buffer = _gst_buffer_new_wrapped (*data, size, g_free);
}
static void
gstelement_class->request_new_pad =
GST_DEBUG_FUNCPTR (gst_flv_mux_request_new_pad);
gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_flv_mux_release_pad);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&videosink_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&audiosink_templ));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_templ));
+ gst_element_class_set_details_simple (gstelement_class, "FLV muxer",
+ "Codec/Muxer",
+ "Muxes video/audio streams into a FLV stream",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ GST_DEBUG_CATEGORY_INIT (flvmux_debug, "flvmux", 0, "FLV muxer");
}
static void
-gst_flv_mux_init (GstFlvMux * mux, GstFlvMuxClass * g_class)
+gst_flv_mux_init (GstFlvMux * mux)
{
mux->srcpad = gst_pad_new_from_static_template (&src_templ, "src");
gst_pad_set_event_function (mux->srcpad, gst_flv_mux_handle_src_event);
gboolean ret = TRUE;
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+ GstFlvPad *flvpad;
+
+ gst_event_parse_caps (event, &caps);
+
+ /* find stream data */
+ flvpad = (GstFlvPad *) gst_pad_get_element_private (pad);
+ g_assert (flvpad);
+
+ if (flvpad->video) {
+ ret = gst_flv_mux_video_pad_setcaps (pad, caps);
+ } else {
+ ret = gst_flv_mux_audio_pad_setcaps (pad, caps);
+ }
+ /* and eat */
+ ret = FALSE;
+ gst_event_unref (event);
+ break;
+ }
case GST_EVENT_TAG:{
GstTagList *list;
GstTagSetter *setter = GST_TAG_SETTER (mux);
static GstPad *
gst_flv_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * pad_name)
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
GstFlvMux *mux = GST_FLV_MUX (element);
GstFlvPad *cpad;
GstPad *pad = NULL;
const gchar *name = NULL;
- GstPadSetCapsFunction setcapsfunc = NULL;
gboolean video;
if (mux->state != GST_FLV_MUX_STATE_HEADER) {
mux->have_audio = TRUE;
name = "audio";
video = FALSE;
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_flv_mux_audio_pad_setcaps);
} else if (templ == gst_element_class_get_pad_template (klass, "video")) {
if (mux->have_video) {
GST_WARNING_OBJECT (mux, "Already have a video pad");
mux->have_video = TRUE;
name = "video";
video = TRUE;
- setcapsfunc = GST_DEBUG_FUNCPTR (gst_flv_mux_video_pad_setcaps);
} else {
GST_WARNING_OBJECT (mux, "Invalid template");
return NULL;
gst_pad_set_event_function (pad,
GST_DEBUG_FUNCPTR (gst_flv_mux_handle_sink_event));
- gst_pad_set_setcaps_function (pad, setcapsfunc);
gst_pad_set_active (pad, TRUE);
gst_element_add_pad (element, pad);
static GstFlowReturn
gst_flv_mux_push (GstFlvMux * mux, GstBuffer * buffer)
{
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (mux->srcpad));
/* pushing the buffer that rewrites the header will make it no longer be the
* total output size in bytes, but it doesn't matter at that point */
- mux->byte_count += GST_BUFFER_SIZE (buffer);
+ mux->byte_count += gst_buffer_get_size (buffer);
return gst_pad_push (mux->srcpad, buffer);
}
GstBuffer *header;
guint8 *data;
- header = gst_buffer_new_and_alloc (9 + 4);
- data = GST_BUFFER_DATA (header);
+ _gst_buffer_new_and_alloc (9 + 4, &header, &data);
data[0] = 'F';
data[1] = 'L';
GST_DEBUG_OBJECT (mux, "preallocating %d bytes for the index",
preallocate_size);
- tmp = gst_buffer_new_and_alloc (preallocate_size);
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (preallocate_size, &tmp, &data);
/* prefill the space with a gstfiller: <spaces> script tag variable */
GST_WRITE_UINT16_BE (data, 9); /* 9 characters */
static GstBuffer *
gst_flv_mux_create_number_script_value (const gchar * name, gdouble value)
{
- GstBuffer *tmp = gst_buffer_new_and_alloc (2 + strlen (name) + 1 + 8);
- guint8 *data = GST_BUFFER_DATA (tmp);
+ GstBuffer *tmp;
+ guint8 *data;
+
+ _gst_buffer_new_and_alloc (2 + strlen (name) + 1 + 8, &tmp, &data);
GST_WRITE_UINT16_BE (data, strlen (name)); /* name length */
memcpy (&data[2], name, strlen (name));
GST_DEBUG_OBJECT (mux, "tags = %" GST_PTR_FORMAT, tags);
- script_tag = gst_buffer_new_and_alloc (11);
- data = GST_BUFFER_DATA (script_tag);
+ /* FIXME perhaps some bytewriter'ing here ... */
+
+ _gst_buffer_new_and_alloc (11, &script_tag, &data);
data[0] = 18;
/* Stream ID */
data[8] = data[9] = data[10] = 0;
- tmp = gst_buffer_new_and_alloc (13);
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (13, &tmp, &data);
data[0] = 2; /* string */
data[1] = 0;
data[2] = 10; /* length 10 */
script_tag = gst_buffer_join (script_tag, tmp);
n_tags = (tags) ? gst_structure_n_fields ((GstStructure *) tags) : 0;
- tmp = gst_buffer_new_and_alloc (5);
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (5, &tmp, &data);
data[0] = 8; /* ECMA array */
GST_WRITE_UINT32_BE (data + 1, n_tags);
script_tag = gst_buffer_join (script_tag, tmp);
if (!gst_tag_list_get_string (tags, tag_name, &s))
continue;
- tmp = gst_buffer_new_and_alloc (2 + strlen (t) + 1 + 2 + strlen (s));
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (2 + strlen (t) + 1 + 2 + strlen (s),
+ &tmp, &data);
data[0] = 0; /* tag name length */
data[1] = strlen (t);
memcpy (&data[2], t, strlen (t));
d /= (gdouble) GST_SECOND;
GST_DEBUG_OBJECT (mux, "determined the duration to be %f", d);
- data = GST_BUFFER_DATA (script_tag);
+ data = gst_buffer_map (script_tag, NULL, NULL, GST_MAP_WRITE);
GST_WRITE_DOUBLE_BE (data + 29 + 2 + 8 + 1, d);
+ gst_buffer_unmap (script_tag, data, -1);
}
if (mux->have_video) {
}
}
- if (video_pad && GST_PAD_CAPS (video_pad)) {
- GstStructure *s = gst_caps_get_structure (GST_PAD_CAPS (video_pad), 0);
+ if (video_pad && gst_pad_has_current_caps (video_pad)) {
+ GstCaps *caps;
+ GstStructure *s;
gint size;
gint num, den;
script_tag = gst_buffer_join (script_tag, tmp);
tags_written++;
+ caps = gst_pad_get_current_caps (video_pad);
+ s = gst_caps_get_structure (caps, 0);
+ gst_caps_unref (caps);
+
if (gst_structure_get_int (s, "width", &size)) {
GST_DEBUG_OBJECT (mux, "putting width %d in the metadata", size);
{
const gchar *s = "GStreamer FLV muxer";
- tmp = gst_buffer_new_and_alloc (2 + 15 + 1 + 2 + strlen (s));
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (2 + 15 + 1 + 2 + strlen (s), &tmp, &data);
data[0] = 0; /* 15 bytes name */
data[1] = 15;
memcpy (&data[2], "metadatacreator", 15);
months[tm->tm_mon], tm->tm_mday, tm->tm_hour, tm->tm_min, tm->tm_sec,
tm->tm_year + 1900);
- tmp = gst_buffer_new_and_alloc (2 + 12 + 1 + 2 + strlen (s));
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (2 + 12 + 1 + 2 + strlen (s), &tmp, &data);
data[0] = 0; /* 12 bytes name */
data[1] = 12;
memcpy (&data[2], "creationdate", 12);
tags_written++;
}
- tmp = gst_buffer_new_and_alloc (2 + 0 + 1);
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (2 + 0 + 1, &tmp, &data);
data[0] = 0; /* 0 byte size */
data[1] = 0;
data[2] = 9; /* end marker */
script_tag = gst_buffer_join (script_tag, tmp);
tags_written++;
- tmp = gst_buffer_new_and_alloc (4);
- data = GST_BUFFER_DATA (tmp);
- GST_WRITE_UINT32_BE (data, GST_BUFFER_SIZE (script_tag));
+ _gst_buffer_new_and_alloc (4, &tmp, &data);
+ GST_WRITE_UINT32_BE (data, gst_buffer_get_size (script_tag));
script_tag = gst_buffer_join (script_tag, tmp);
- data = GST_BUFFER_DATA (script_tag);
- data[1] = ((GST_BUFFER_SIZE (script_tag) - 11 - 4) >> 16) & 0xff;
- data[2] = ((GST_BUFFER_SIZE (script_tag) - 11 - 4) >> 8) & 0xff;
- data[3] = ((GST_BUFFER_SIZE (script_tag) - 11 - 4) >> 0) & 0xff;
+ data = gst_buffer_map (script_tag, NULL, NULL, GST_MAP_WRITE);
+ data[1] = ((gst_buffer_get_size (script_tag) - 11 - 4) >> 16) & 0xff;
+ data[2] = ((gst_buffer_get_size (script_tag) - 11 - 4) >> 8) & 0xff;
+ data[3] = ((gst_buffer_get_size (script_tag) - 11 - 4) >> 0) & 0xff;
GST_WRITE_UINT32_BE (data + 11 + 13 + 1, tags_written);
+ gst_buffer_unmap (script_tag, data, -1);
return script_tag;
}
guint32 timestamp =
(GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) ? GST_BUFFER_TIMESTAMP (buffer) /
GST_MSECOND : cpad->last_timestamp / GST_MSECOND;
+ guint8 *bdata;
+ gsize bsize;
+
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READ);
size = 11;
if (cpad->video) {
size += 1;
if (cpad->video_codec == 7)
- size += 4 + GST_BUFFER_SIZE (buffer);
+ size += 4 + bsize;
else
- size += GST_BUFFER_SIZE (buffer);
+ size += bsize;
} else {
size += 1;
if (cpad->audio_codec == 10)
- size += 1 + GST_BUFFER_SIZE (buffer);
+ size += 1 + bsize;
else
- size += GST_BUFFER_SIZE (buffer);
+ size += bsize;
}
size += 4;
- tag = gst_buffer_new_and_alloc (size);
+ _gst_buffer_new_and_alloc (size, &tag, &data);
GST_BUFFER_TIMESTAMP (tag) = timestamp * GST_MSECOND;
- data = GST_BUFFER_DATA (tag);
memset (data, 0, size);
data[0] = (cpad->video) ? 9 : 8;
/* FIXME: what to do about composition time */
data[13] = data[14] = data[15] = 0;
- memcpy (data + 11 + 1 + 4, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ memcpy (data + 11 + 1 + 4, bdata, bsize);
} else {
- memcpy (data + 11 + 1, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ memcpy (data + 11 + 1, bdata, bsize);
}
} else {
data[11] |= (cpad->audio_codec << 4) & 0xf0;
if (cpad->audio_codec == 10) {
data[12] = is_codec_data ? 0 : 1;
- memcpy (data + 11 + 1 + 1, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ memcpy (data + 11 + 1 + 1, bdata, bsize);
} else {
- memcpy (data + 11 + 1, GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ memcpy (data + 11 + 1, bdata, bsize);
}
}
+ gst_buffer_unmap (buffer, bdata, -1);
+
GST_WRITE_UINT32_BE (data + size - 4, size - 4);
- gst_buffer_copy_metadata (tag, buffer, GST_BUFFER_COPY_TIMESTAMPS);
+ GST_BUFFER_TIMESTAMP (tag) = GST_BUFFER_TIMESTAMP (buffer);
+ GST_BUFFER_DURATION (tag) = GST_BUFFER_DURATION (buffer);
+ GST_BUFFER_OFFSET (tag) = GST_BUFFER_OFFSET (buffer);
+ GST_BUFFER_OFFSET_END (tag) = GST_BUFFER_OFFSET_END (buffer);
+
/* mark the buffer if it's an audio buffer and there's also video being muxed
* or it's a video interframe */
if ((mux->have_video && !cpad->video) ||
gst_structure_set_value (structure, "streamheader", &streamheader);
g_value_unset (&streamheader);
- if (GST_PAD_CAPS (mux->srcpad) == NULL)
+ if (!gst_pad_has_current_caps (mux->srcpad))
gst_pad_set_caps (mux->srcpad, caps);
gst_caps_unref (caps);
GstFlowReturn ret;
/* arrange downstream running time */
- buffer = gst_buffer_make_metadata_writable (buffer);
+ buffer = gst_buffer_make_writable (buffer);
GST_BUFFER_TIMESTAMP (buffer) =
gst_segment_to_running_time (&cpad->collect.segment,
GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer));
GList *l;
guint32 index_len, allocate_size;
guint32 i, index_skip;
+ GstSegment segment;
if (mux->streamable)
return GST_FLOW_OK;
/* seek back to the preallocated index space */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- 13 + 29, GST_CLOCK_TIME_NONE, 13 + 29);
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = segment.time = 13 + 29;
+ event = gst_event_new_segment (&segment);
if (!gst_pad_push_event (mux->srcpad, event)) {
GST_WARNING_OBJECT (mux, "Seek to rewrite header failed");
return GST_FLOW_OK;
/* see size calculation in gst_flv_mux_preallocate_index */
allocate_size = 11 + 8 + 22 + 10 + index_len * 18;
GST_DEBUG_OBJECT (mux, "Allocating %d bytes for index", allocate_size);
- index = gst_buffer_new_and_alloc (allocate_size);
- data = GST_BUFFER_DATA (index);
+ _gst_buffer_new_and_alloc (allocate_size, &index, &data);
GST_WRITE_UINT16_BE (data, 9); /* the 'keyframes' key */
memcpy (data + 2, "keyframes", 9);
guint8 *data;
guint32 remaining_filler_size;
- tmp = gst_buffer_new_and_alloc (14);
- data = GST_BUFFER_DATA (tmp);
+ _gst_buffer_new_and_alloc (14, &tmp, &data);
GST_WRITE_UINT16_BE (data, 9);
memcpy (data + 2, "gstfiller", 9);
GST_WRITE_UINT8 (data + 11, 2); /* string */
rewrite = gst_buffer_join (rewrite, index);
- gst_buffer_set_caps (rewrite, GST_PAD_CAPS (mux->srcpad));
return gst_flv_mux_push (mux, rewrite);
}
gboolean eos = TRUE;
if (mux->state == GST_FLV_MUX_STATE_HEADER) {
+ GstSegment segment;
+
if (mux->collect->data == NULL) {
GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
("No input streams configured"));
return GST_FLOW_ERROR;
}
- if (gst_pad_push_event (mux->srcpad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0)))
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ if (gst_pad_push_event (mux->srcpad, gst_event_new_segment (&segment)))
ret = gst_flv_mux_write_header (mux);
else
ret = GST_FLOW_ERROR;
/* gstgoom.c: implementation of goom drawing element
* Copyright (C) <2001> Richard Boulton <richard@tartarus.org>
* (C) <2006> Wim Taymans <wim at fluendo dot com>
+ * (C) <2011> Wim Taymans <wim.taymans at gmail dot com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
);
-static void gst_goom_class_init (GstGoomClass * klass);
-static void gst_goom_base_init (GstGoomClass * klass);
-static void gst_goom_init (GstGoom * goom);
static void gst_goom_finalize (GObject * object);
static GstStateChangeReturn gst_goom_change_state (GstElement * element,
static gboolean gst_goom_src_query (GstPad * pad, GstQuery * query);
-static gboolean gst_goom_sink_setcaps (GstPad * pad, GstCaps * caps);
-static gboolean gst_goom_src_setcaps (GstPad * pad, GstCaps * caps);
-
-static GstElementClass *parent_class = NULL;
-
-GType
-gst_goom_get_type (void)
-{
- static GType type = 0;
-
- if (!type) {
- static const GTypeInfo info = {
- sizeof (GstGoomClass),
- (GBaseInitFunc) gst_goom_base_init,
- NULL,
- (GClassInitFunc) gst_goom_class_init,
- NULL,
- NULL,
- sizeof (GstGoom),
- 0,
- (GInstanceInitFunc) gst_goom_init,
- };
-
- type = g_type_register_static (GST_TYPE_ELEMENT, "GstGoom", &info, 0);
- }
- return type;
-}
-
-static void
-gst_goom_base_init (GstGoomClass * klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (element_class, "GOOM: what a GOOM!",
- "Visualization",
- "Takes frames of data and outputs video frames using the GOOM filter",
- "Wim Taymans <wim@fluendo.com>");
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_template));
-}
+#define gst_goom_parent_class parent_class
+G_DEFINE_TYPE (GstGoom, gst_goom, GST_TYPE_ELEMENT);
static void
gst_goom_class_init (GstGoomClass * klass)
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
-
gobject_class->finalize = gst_goom_finalize;
+ gst_element_class_set_details_simple (gstelement_class, "GOOM: what a GOOM!",
+ "Visualization",
+ "Takes frames of data and outputs video frames using the GOOM filter",
+ "Wim Taymans <wim@fluendo.com>");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_goom_change_state);
}
GST_DEBUG_FUNCPTR (gst_goom_chain));
gst_pad_set_event_function (goom->sinkpad,
GST_DEBUG_FUNCPTR (gst_goom_sink_event));
- gst_pad_set_setcaps_function (goom->sinkpad,
- GST_DEBUG_FUNCPTR (gst_goom_sink_setcaps));
gst_element_add_pad (GST_ELEMENT (goom), goom->sinkpad);
goom->srcpad = gst_pad_new_from_static_template (&src_template, "src");
- gst_pad_set_setcaps_function (goom->srcpad,
- GST_DEBUG_FUNCPTR (gst_goom_src_setcaps));
gst_pad_set_event_function (goom->srcpad,
GST_DEBUG_FUNCPTR (gst_goom_src_event));
gst_pad_set_query_function (goom->srcpad,
goom->plugin = NULL;
g_object_unref (goom->adapter);
+ if (goom->pool)
+ gst_object_unref (goom->pool);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
}
static gboolean
-gst_goom_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_goom_sink_setcaps (GstGoom * goom, GstCaps * caps)
{
- GstGoom *goom;
GstStructure *structure;
- gboolean res;
-
- goom = GST_GOOM (GST_PAD_PARENT (pad));
structure = gst_caps_get_structure (caps, 0);
- res = gst_structure_get_int (structure, "channels", &goom->channels);
- res &= gst_structure_get_int (structure, "rate", &goom->rate);
+ gst_structure_get_int (structure, "channels", &goom->channels);
+ gst_structure_get_int (structure, "rate", &goom->rate);
goom->bps = goom->channels * sizeof (gint16);
- return res;
+ return TRUE;
}
static gboolean
-gst_goom_src_setcaps (GstPad * pad, GstCaps * caps)
+gst_goom_src_setcaps (GstGoom * goom, GstCaps * caps)
{
- GstGoom *goom;
GstStructure *structure;
-
- goom = GST_GOOM (GST_PAD_PARENT (pad));
+ gboolean res;
structure = gst_caps_get_structure (caps, 0);
-
if (!gst_structure_get_int (structure, "width", &goom->width) ||
!gst_structure_get_int (structure, "height", &goom->height) ||
!gst_structure_get_fraction (structure, "framerate", &goom->fps_n,
&goom->fps_d))
- return FALSE;
+ goto error;
goom_set_resolution (goom->plugin, goom->width, goom->height);
GST_DEBUG_OBJECT (goom, "dimension %dx%d, framerate %d/%d, spf %d",
goom->width, goom->height, goom->fps_n, goom->fps_d, goom->spf);
- return TRUE;
+ res = gst_pad_push_event (goom->srcpad, gst_event_new_caps (caps));
+
+ return res;
+
+ /* ERRORS */
+error:
+ {
+ GST_DEBUG_OBJECT (goom, "error parsing caps");
+ return FALSE;
+ }
}
static gboolean
{
GstCaps *othercaps, *target;
GstStructure *structure;
- const GstCaps *templ;
+ GstCaps *templ;
+ GstQuery *query;
+ GstBufferPool *pool = NULL;
+ guint size, min, max, prefix, alignment;
templ = gst_pad_get_pad_template_caps (goom->srcpad);
GST_DEBUG_OBJECT (goom, "performing negotiation");
/* see what the peer can do */
- othercaps = gst_pad_peer_get_caps (goom->srcpad);
+ othercaps = gst_pad_peer_get_caps (goom->srcpad, NULL);
if (othercaps) {
target = gst_caps_intersect (othercaps, templ);
gst_caps_unref (othercaps);
+ gst_caps_unref (templ);
if (gst_caps_is_empty (target))
goto no_format;
gst_caps_truncate (target);
} else {
- target = gst_caps_ref ((GstCaps *) templ);
+ target = templ;
}
structure = gst_caps_get_structure (target, 0);
gst_structure_fixate_field_nearest_fraction (structure, "framerate",
DEFAULT_FPS_N, DEFAULT_FPS_D);
- gst_pad_set_caps (goom->srcpad, target);
+ gst_goom_src_setcaps (goom, target);
+
+ /* try to get a bufferpool for the negotiated caps now */
+ query = gst_query_new_allocation (target, TRUE);
+
+ if (gst_pad_peer_query (goom->srcpad, query)) {
+ /* we got configuration from our peer, parse them */
+ gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
+ &alignment, &pool);
+ } else {
+ size = goom->outsize;
+ min = max = 0;
+ prefix = 0;
+ alignment = 0;
+ }
+
+ if (pool == NULL) {
+ GstStructure *config;
+
+ /* we did not get a pool, make one ourselves then */
+ pool = gst_buffer_pool_new ();
+
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set (config, target, size, min, max, prefix,
+ alignment);
+ gst_buffer_pool_set_config (pool, config);
+ }
+
+ if (goom->pool)
+ gst_object_unref (goom->pool);
+ goom->pool = pool;
+
+ /* and activate */
+ gst_buffer_pool_set_active (pool, TRUE);
+
gst_caps_unref (target);
return TRUE;
GstClockTimeDiff diff;
GstClockTime timestamp;
- gst_event_parse_qos (event, &proportion, &diff, ×tamp);
+ gst_event_parse_qos (event, NULL, &proportion, &diff, ×tamp);
/* save stuff for the _chain() function */
GST_OBJECT_LOCK (goom);
goom = GST_GOOM (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ res = gst_goom_sink_setcaps (goom, caps);
+ break;
+ }
case GST_EVENT_FLUSH_START:
res = gst_pad_push_event (goom->srcpad, event);
break;
gst_goom_reset (goom);
res = gst_pad_push_event (goom->srcpad, event);
break;
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time;
- gboolean update;
-
/* the newsegment values are used to clip the input samples
* and to convert the incoming timestamps to running time so
* we can do QoS */
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
-
- /* now configure the values */
- gst_segment_set_newsegment_full (&goom->segment, update,
- rate, arate, format, start, stop, time);
+ gst_event_copy_segment (event, &goom->segment);
res = gst_pad_push_event (goom->srcpad, event);
break;
return res;
}
+/* make sure we are negotiated */
static GstFlowReturn
-get_buffer (GstGoom * goom, GstBuffer ** outbuf)
+ensure_negotiated (GstGoom * goom)
{
- GstFlowReturn ret;
+ gboolean reconfigure;
+
+ GST_OBJECT_LOCK (goom->srcpad);
+ reconfigure = GST_PAD_NEEDS_RECONFIGURE (goom->srcpad);
+ GST_OBJECT_FLAG_UNSET (goom->srcpad, GST_PAD_NEED_RECONFIGURE);
+ GST_OBJECT_UNLOCK (goom->srcpad);
- if (GST_PAD_CAPS (goom->srcpad) == NULL) {
+ /* we don't know an output format yet, pick one */
+ if (reconfigure || !gst_pad_has_current_caps (goom->srcpad)) {
if (!gst_goom_src_negotiate (goom))
return GST_FLOW_NOT_NEGOTIATED;
}
-
- GST_DEBUG_OBJECT (goom, "allocating output buffer with caps %"
- GST_PTR_FORMAT, GST_PAD_CAPS (goom->srcpad));
-
- ret =
- gst_pad_alloc_buffer_and_set_caps (goom->srcpad,
- GST_BUFFER_OFFSET_NONE, goom->outsize,
- GST_PAD_CAPS (goom->srcpad), outbuf);
- if (ret != GST_FLOW_OK)
- return ret;
-
return GST_FLOW_OK;
}
+
static GstFlowReturn
gst_goom_chain (GstPad * pad, GstBuffer * buffer)
{
goto beach;
}
- /* If we don't have an output format yet, preallocate a buffer to try and
- * set one */
- if (GST_PAD_CAPS (goom->srcpad) == NULL) {
- ret = get_buffer (goom, &outbuf);
- if (ret != GST_FLOW_OK) {
- gst_buffer_unref (buffer);
- goto beach;
- }
+ /* Make sure have an output format */
+ ret = ensure_negotiated (goom);
+ if (ret != GST_FLOW_OK) {
+ gst_buffer_unref (buffer);
+ goto beach;
}
/* don't try to combine samples from discont buffer */
GST_DEBUG_OBJECT (goom,
"Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (buffer) / goom->bps, GST_BUFFER_TIMESTAMP (buffer));
+ gst_buffer_get_size (buffer) / goom->bps, GST_BUFFER_TIMESTAMP (buffer));
/* Collect samples until we have enough for an output frame */
gst_adapter_push (goom->adapter, buffer);
/* get next GOOM_SAMPLES, we have at least this amount of samples */
data =
- (const guint16 *) gst_adapter_peek (goom->adapter,
+ (const guint16 *) gst_adapter_map (goom->adapter,
GOOM_SAMPLES * goom->bps);
if (goom->channels == 2) {
/* alloc a buffer if we don't have one yet, this happens
* when we pushed a buffer in this while loop before */
if (outbuf == NULL) {
- ret = get_buffer (goom, &outbuf);
+ GST_DEBUG_OBJECT (goom, "allocating output buffer");
+ ret = gst_buffer_pool_acquire_buffer (goom->pool, &outbuf, NULL);
if (ret != GST_FLOW_OK) {
+ gst_adapter_unmap (goom->adapter, 0);
goto beach;
}
}
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_DURATION (outbuf) = goom->duration;
- GST_BUFFER_SIZE (outbuf) = goom->outsize;
out_frame = (guchar *) goom_update (goom->plugin, goom->datain, 0, 0);
- memcpy (GST_BUFFER_DATA (outbuf), out_frame, goom->outsize);
+ gst_buffer_fill (outbuf, 0, out_frame, goom->outsize);
+
+ gst_adapter_unmap (goom->adapter, 0);
GST_DEBUG ("Pushing frame with time=%" GST_TIME_FORMAT ", duration=%"
GST_TIME_FORMAT, GST_TIME_ARGS (timestamp),
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
+ if (goom->pool) {
+ gst_buffer_pool_set_active (goom->pool, FALSE);
+ gst_object_unref (goom->pool);
+ goom->pool = NULL;
+ }
break;
case GST_STATE_CHANGE_READY_TO_NULL:
break;
gint height;
GstClockTime duration;
guint outsize;
+ GstBufferPool *pool;
/* samples per frame */
guint spf;
&stop_type, &stop);
gst_event_unref (event);
- flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+ flush = !!(flags & GST_SEEK_FLAG_FLUSH);
if (format != GST_FORMAT_TIME && format != GST_FORMAT_DEFAULT) {
GST_ERROR_OBJECT (pad, "Seek in invalid format: %s",
$(GST_PLUGINS_BASE_LIBS) \
-lgstriff-@GST_MAJORMINOR@ \
-lgstaudio-@GST_MAJORMINOR@ \
+ -lgstvideo-@GST_MAJORMINOR@ \
-lgstrtp-@GST_MAJORMINOR@ \
-lgsttag-@GST_MAJORMINOR@ \
-lgstpbutils-@GST_MAJORMINOR@ \
atom_data_new_from_gst_buffer (guint32 fourcc, const GstBuffer * buf)
{
AtomData *data = atom_data_new (fourcc);
+ gsize size = gst_buffer_get_size ((GstBuffer *) buf);
- atom_data_alloc_mem (data, GST_BUFFER_SIZE (buf));
- g_memmove (data->data, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ atom_data_alloc_mem (data, size);
+ gst_buffer_extract ((GstBuffer *) buf, 0, data->data, size);
return data;
}
}
}
+static GstBuffer *
+_gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
+{
+ GstBuffer *buf;
+
+ buf = gst_buffer_new ();
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (free_func ? 0 : GST_MEMORY_FLAG_READONLY,
+ mem, free_func, size, 0, size));
+
+ return buf;
+}
+
void
atom_moov_add_blob_tag (AtomMOOV * moov, guint8 * data, guint size)
{
if (len > size)
return;
- buf = gst_buffer_new ();
- GST_BUFFER_SIZE (buf) = len - 8;
- GST_BUFFER_DATA (buf) = data + 8;
-
+ buf = _gst_buffer_new_wrapped (data + 8, len - 8, NULL);
data_atom = atom_data_new_from_gst_buffer (fourcc, buf);
gst_buffer_unref (buf);
guint8 *bdata;
/* need full atom */
- buf = gst_buffer_new_and_alloc (size + 4);
- bdata = GST_BUFFER_DATA (buf);
+ bdata = g_malloc (size + 4);
/* full atom: version and flags */
GST_WRITE_UINT32_BE (bdata, 0);
memcpy (bdata + 4, data, size);
+ buf = _gst_buffer_new_wrapped (bdata, size + 4, g_free);
data_atom = atom_data_new_from_gst_buffer (fourcc, buf);
gst_buffer_unref (buf);
GstBuffer *buf;
guint8 *data;
- buf = gst_buffer_new_and_alloc (8);
- data = GST_BUFFER_DATA (buf);
-
+ data = g_malloc (8);
/* ihdr = image header box */
GST_WRITE_UINT32_BE (data, par_width);
GST_WRITE_UINT32_BE (data + 4, par_height);
+ buf = _gst_buffer_new_wrapped (data, 8, g_free);
atom_data = atom_data_new_from_gst_buffer (FOURCC_pasp, buf);
gst_buffer_unref (buf);
/* optional DecoderSpecificInfo */
if (codec_data) {
DecoderSpecificInfoDescriptor *desc;
+ gsize size;
esds->es.dec_conf_desc.dec_specific_info = desc =
desc_dec_specific_info_new ();
- desc_dec_specific_info_alloc_data (desc, GST_BUFFER_SIZE (codec_data));
-
- memcpy (desc->data, GST_BUFFER_DATA (codec_data),
- GST_BUFFER_SIZE (codec_data));
+ size = gst_buffer_get_size ((GstBuffer *) codec_data);
+ desc_dec_specific_info_alloc_data (desc, size);
+ gst_buffer_extract ((GstBuffer *) codec_data, 0, desc->data, size);
}
return build_atom_info_wrapper ((Atom *) esds, atom_esds_copy_data,
{
AtomData *atom_data;
GstBuffer *buf;
+ guint8 *data;
if (buffer_size_db == 0 && avg_bitrate == 0 && max_bitrate == 0)
return 0;
- buf = gst_buffer_new_and_alloc (12);
-
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf), buffer_size_db);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + 4, max_bitrate);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + 8, avg_bitrate);
+ data = g_malloc (12);
+ GST_WRITE_UINT32_BE (data, buffer_size_db);
+ GST_WRITE_UINT32_BE (data + 4, max_bitrate);
+ GST_WRITE_UINT32_BE (data + 8, avg_bitrate);
+ buf = _gst_buffer_new_wrapped (data, 12, g_free);
atom_data =
atom_data_new_from_gst_buffer (GST_MAKE_FOURCC ('b', 't', 'r', 't'), buf);
gst_buffer_unref (buf);
{
AtomInfo *esds, *mp4a;
GstBuffer *buf;
+ guint32 tmp = 0;
/* Add ESDS atom to WAVE */
esds = build_esds_extension (trak, ESDS_OBJECT_TYPE_MPEG4_P3,
/* Add MP4A atom to the WAVE:
* not really in spec, but makes offset based players happy */
- buf = gst_buffer_new_and_alloc (4);
- *((guint32 *) GST_BUFFER_DATA (buf)) = 0;
+ buf = _gst_buffer_new_wrapped (&tmp, 4, NULL);
mp4a = build_codec_data_extension (FOURCC_mp4a, buf);
gst_buffer_unref (buf);
{
AtomData *atom_data;
GstBuffer *buf;
+ guint8 f = fields;
if (fields == 1) {
return NULL;
}
- buf = gst_buffer_new_and_alloc (1);
- GST_BUFFER_DATA (buf)[0] = (guint8) fields;
-
+ buf = _gst_buffer_new_wrapped (&f, 1, NULL);
atom_data =
atom_data_new_from_gst_buffer (GST_MAKE_FOURCC ('f', 'i', 'e', 'l'), buf);
gst_buffer_unref (buf);
cdef_size = 8 + 2 + cdef_array_size * 6;
}
- buf = gst_buffer_new_and_alloc (idhr_size + colr_size + cmap_size +
- cdef_size);
- gst_byte_writer_init_with_buffer (&writer, buf, FALSE);
+ gst_byte_writer_init_with_size (&writer,
+ idhr_size + colr_size + cmap_size + cdef_size, TRUE);
/* ihdr = image header box */
gst_byte_writer_put_uint32_be (&writer, 22);
}
g_assert (gst_byte_writer_get_remaining (&writer) == 0);
+ buf = gst_byte_writer_reset_and_get_buffer (&writer);
atom_data = atom_data_new_from_gst_buffer (FOURCC_jp2h, buf);
gst_buffer_unref (buf);
GstBuffer *buf;
AtomInfo *res;
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = ext;
- GST_BUFFER_SIZE (buf) = sizeof (ext);
-
/* vendor */
GST_WRITE_UINT32_LE (ext, 0);
/* decoder version */
/* frames per sample */
GST_WRITE_UINT8 (ext + 8, 1);
+ buf = _gst_buffer_new_wrapped (ext, sizeof (ext), NULL);
res = build_codec_data_extension (GST_MAKE_FOURCC ('d', 'a', 'm', 'r'), buf);
gst_buffer_unref (buf);
return res;
GstBuffer *buf;
AtomInfo *res;
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = ext;
- GST_BUFFER_SIZE (buf) = sizeof (ext);
-
/* vendor */
GST_WRITE_UINT32_LE (ext, 0);
/* decoder version */
GST_WRITE_UINT8 (ext + 5, 10);
GST_WRITE_UINT8 (ext + 6, 0);
+ buf = _gst_buffer_new_wrapped (ext, sizeof (ext), NULL);
res = build_codec_data_extension (GST_MAKE_FOURCC ('d', '2', '6', '3'), buf);
gst_buffer_unref (buf);
return res;
/* convert to uint32 from fixed point */
gamma_fp = (guint32) 65536 *gamma;
- buf = gst_buffer_new_and_alloc (4);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf), gamma_fp);
+ gamma_fp = GUINT32_TO_BE (gamma_fp);
+ buf = _gst_buffer_new_wrapped (&gamma_fp, 4, NULL);
res = build_codec_data_extension (FOURCC_gama, buf);
gst_buffer_unref (buf);
return res;
{
AtomInfo *res;
GstBuffer *buf;
+ gsize size;
+ guint8 *data;
/* the seqh plus its size and fourcc */
- buf = gst_buffer_new_and_alloc (GST_BUFFER_SIZE (seqh) + 8);
+ size = gst_buffer_get_size ((GstBuffer *) seqh);
+ data = g_malloc (size + 8);
- GST_WRITE_UINT32_LE (GST_BUFFER_DATA (buf), FOURCC_SEQH);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + 4, GST_BUFFER_SIZE (seqh));
- memcpy (GST_BUFFER_DATA (buf) + 8, GST_BUFFER_DATA (seqh),
- GST_BUFFER_SIZE (seqh));
+ GST_WRITE_UINT32_LE (data, FOURCC_SEQH);
+ GST_WRITE_UINT32_BE (data + 4, size + 8);
+ gst_buffer_extract ((GstBuffer *) seqh, 0, data + 8, size);
+ buf = _gst_buffer_new_wrapped (data, size + 8, g_free);
res = build_codec_data_extension (FOURCC_SMI_, buf);
gst_buffer_unref (buf);
return res;
within the WAVE header (below), it's little endian. */
fourcc = MS_WAVE_FOURCC (0x11);
- buf = gst_buffer_new_and_alloc (ima_adpcm_atom_size);
- data = GST_BUFFER_DATA (buf);
+ data = g_malloc (ima_adpcm_atom_size);
/* This atom's content is a WAVE header, including 2 bytes of extra data.
Note that all of this is little-endian, unlike most stuff in qt. */
GST_WRITE_UINT16_LE (data + 16, 2); /* Two extra bytes */
GST_WRITE_UINT16_LE (data + 18, samplesperblock);
+ buf = _gst_buffer_new_wrapped (data, ima_adpcm_atom_size, g_free);
atom_data = atom_data_new_from_gst_buffer (fourcc, buf);
gst_buffer_unref (buf);
build_uuid_xmp_atom (GstBuffer * xmp_data)
{
AtomUUID *uuid;
+ gsize size;
static guint8 xmp_uuid[] = { 0xBE, 0x7A, 0xCF, 0xCB,
0x97, 0xA9, 0x42, 0xE8,
0x9C, 0x71, 0x99, 0x94,
uuid = atom_uuid_new ();
memcpy (uuid->uuid, xmp_uuid, 16);
- uuid->data = g_malloc (GST_BUFFER_SIZE (xmp_data));
- uuid->datalen = GST_BUFFER_SIZE (xmp_data);
- memcpy (uuid->data, GST_BUFFER_DATA (xmp_data), GST_BUFFER_SIZE (xmp_data));
+ size = gst_buffer_get_size (xmp_data);
+ uuid->data = g_malloc (size);
+ uuid->datalen = size;
+ gst_buffer_extract (xmp_data, 0, uuid->data, size);
return build_atom_info_wrapper ((Atom *) uuid, atom_uuid_copy_data,
atom_uuid_free);
guint64 size = 0;
if (prefix) {
- if (fwrite (GST_BUFFER_DATA (prefix), 1, GST_BUFFER_SIZE (prefix), f) !=
- GST_BUFFER_SIZE (prefix)) {
+ guint8 *bdata;
+ gsize bsize;
+
+ bdata = gst_buffer_map (prefix, &bsize, NULL, GST_MAP_READ);
+ if (fwrite (bdata, 1, bsize, f) != bsize) {
+ gst_buffer_unmap (prefix, bdata, bsize);
return FALSE;
}
+ gst_buffer_unmap (prefix, bdata, bsize);
}
if (!atom_ftyp_copy_data (ftyp, &data, &size, &offset)) {
return FALSE;
PROP_FAST_START_MODE
};
-GST_BOILERPLATE (GstQTMoovRecover, gst_qt_moov_recover, GstPipeline,
- GST_TYPE_PIPELINE);
+#define gst_qt_moov_recover_parent_class parent_class
+G_DEFINE_TYPE (GstQTMoovRecover, gst_qt_moov_recover, GST_TYPE_PIPELINE);
/* property functions */
static void gst_qt_moov_recover_set_property (GObject * object,
static void gst_qt_moov_recover_finalize (GObject * object);
static void
-gst_qt_moov_recover_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-#if 0
- GstQTMoovRecoverClass *klass = (GstQTMoovRecoverClass *) g_class;
-#endif
- gst_element_class_set_details_simple (element_class, "QT Moov Recover",
- "Util", "Recovers unfinished qtmux files",
- "Thiago Santos <thiago.sousa.santos@collabora.co.uk>");
-}
-
-static void
gst_qt_moov_recover_class_init (GstQTMoovRecoverClass * klass)
{
GObjectClass *gobject_class;
GST_DEBUG_CATEGORY_INIT (gst_qt_moov_recover_debug, "qtmoovrecover", 0,
"QT Moovie Recover");
+
+ gst_element_class_set_details_simple (gstelement_class, "QT Moov Recover",
+ "Util", "Recovers unfinished qtmux files",
+ "Thiago Santos <thiago.sousa.santos@collabora.co.uk>");
}
static void
-gst_qt_moov_recover_init (GstQTMoovRecover * qtmr,
- GstQTMoovRecoverClass * qtmr_klass)
+gst_qt_moov_recover_init (GstQTMoovRecover * qtmr)
{
}
#include <gst/gst.h>
#include <gst/base/gstcollectpads.h>
+#include <gst/video/video.h>
#include <gst/tag/xmpwriter.h>
#include <sys/types.h>
/* pad functions */
static GstPad *gst_qt_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_qt_mux_release_pad (GstElement * element, GstPad * pad);
/* event */
GstQTMux * qtmux)
{
GstBuffer *newbuf;
+ guint8 *data;
+ gsize size;
GST_LOG_OBJECT (qtmux, "Preparing jpc buffer");
if (buf == NULL)
return NULL;
- newbuf = gst_buffer_new_and_alloc (GST_BUFFER_SIZE (buf) + 8);
- gst_buffer_copy_metadata (newbuf, buf, GST_BUFFER_COPY_ALL);
+ size = gst_buffer_get_size (buf);
+ newbuf = gst_buffer_new_and_alloc (size + 8);
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_ALL, 8, size);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (newbuf), GST_BUFFER_SIZE (newbuf));
- GST_WRITE_UINT32_LE (GST_BUFFER_DATA (newbuf) + 4, FOURCC_jp2c);
+ data = gst_buffer_map (newbuf, &size, NULL, GST_MAP_WRITE);
+ GST_WRITE_UINT32_BE (data, size);
+ GST_WRITE_UINT32_LE (data + 4, FOURCC_jp2c);
- memcpy (GST_BUFFER_DATA (newbuf) + 8, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+  gst_buffer_unmap (newbuf, data, size);
gst_buffer_unref (buf);
return newbuf;
GstCaps *caps;
GstStructure *structure;
gint flags = 0;
+ guint8 *data;
+ gsize size;
g_return_if_fail (gst_tag_get_type (tag) == GST_TYPE_BUFFER);
if (!buf)
goto done;
- caps = gst_buffer_get_caps (buf);
+ /* FIXME-0.11 caps metadata ? */
+ /* caps = gst_buffer_get_caps (buf); */
+ caps = NULL;
if (!caps) {
GST_WARNING_OBJECT (qtmux, "preview image without caps");
goto done;
goto done;
}
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT
- " -> image size %d", GST_FOURCC_ARGS (fourcc), GST_BUFFER_SIZE (buf));
- atom_moov_add_tag (qtmux->moov, fourcc, flags, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ " -> image size %d", GST_FOURCC_ARGS (fourcc), size);
+ atom_moov_add_tag (qtmux->moov, fourcc, flags, data, size);
+ gst_buffer_unmap (buf, data, size);
done:
g_value_unset (&value);
}
GstCaps *caps = NULL;
val = gst_tag_list_get_value_index (list, GST_QT_DEMUX_PRIVATE_TAG, i);
- buf = (GstBuffer *) gst_value_get_mini_object (val);
+ buf = (GstBuffer *) gst_value_get_buffer (val);
- if (buf && (caps = gst_buffer_get_caps (buf))) {
+ /* FIXME-0.11 */
+ if (buf && (caps = NULL /*gst_buffer_get_caps (buf) */ )) {
GstStructure *s;
const gchar *style = NULL;
+ guint8 *data;
+ gsize size;
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
GST_DEBUG_OBJECT (qtmux, "Found private tag %d/%d; size %d, caps %"
- GST_PTR_FORMAT, i, num_tags, GST_BUFFER_SIZE (buf), caps);
+ GST_PTR_FORMAT, i, num_tags, size, caps);
s = gst_caps_get_structure (caps, 0);
if (s && (style = gst_structure_get_string (s, "style"))) {
/* try to prevent some style tag ending up into another variant
(strcmp (style, "iso") == 0 &&
qtmux_klass->format == GST_QT_MUX_FORMAT_3GP)) {
GST_DEBUG_OBJECT (qtmux, "Adding private tag");
- atom_moov_add_blob_tag (qtmux->moov, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ atom_moov_add_blob_tag (qtmux->moov, data, size);
}
}
+ gst_buffer_unmap (buf, data, size);
gst_caps_unref (caps);
}
}
GstBuffer *buf;
buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = GST_BUFFER_MALLOCDATA (buf) = data;
- GST_BUFFER_SIZE (buf) = size;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
return buf;
}
gboolean mind_fast)
{
GstFlowReturn res;
- guint8 *data;
- guint size;
+ gsize size;
g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
-
+ size = gst_buffer_get_size (buf);
GST_LOG_OBJECT (qtmux, "sending buffer size %d", size);
if (mind_fast && qtmux->fast_start_file) {
gint ret;
+ guint8 *data;
GST_LOG_OBJECT (qtmux, "to temporary file");
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
ret = fwrite (data, sizeof (guint8), size, qtmux->fast_start_file);
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
if (ret != size)
goto write_error;
res = GST_FLOW_OK;
} else {
GST_LOG_OBJECT (qtmux, "downstream");
-
- buf = gst_buffer_make_metadata_writable (buf);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (qtmux->srcpad));
res = gst_pad_push (qtmux->srcpad, buf);
}
* (somehow optimize copy?) */
GST_DEBUG_OBJECT (qtmux, "Sending buffered data");
while (ret == GST_FLOW_OK) {
- gint r;
const int bufsize = 4096;
+ guint8 *data;
+ gsize size;
buf = gst_buffer_new_and_alloc (bufsize);
- r = fread (GST_BUFFER_DATA (buf), sizeof (guint8), bufsize,
- qtmux->fast_start_file);
- if (r == 0)
+    data = gst_buffer_map (buf, &size, NULL, GST_MAP_WRITE);
+ size = fread (data, sizeof (guint8), bufsize, qtmux->fast_start_file);
+ if (size == 0) {
+ gst_buffer_unmap (buf, data, -1);
break;
- GST_BUFFER_SIZE (buf) = r;
- GST_LOG_OBJECT (qtmux, "Pushing buffered buffer of size %d", r);
+ }
+ gst_buffer_unmap (buf, data, size);
+ GST_LOG_OBJECT (qtmux, "Pushing buffered buffer of size %d", (gint) size);
ret = gst_qt_mux_send_buffer (qtmux, buf, offset, FALSE);
buf = NULL;
}
gst_qt_mux_update_mdat_size (GstQTMux * qtmux, guint64 mdat_pos,
guint64 mdat_size, guint64 * offset)
{
- GstEvent *event;
GstBuffer *buf;
gboolean large_file;
+ GstSegment segment;
+ guint8 *data;
+ gsize size;
large_file = (mdat_size > MDAT_LARGE_FILE_LIMIT);
mdat_pos += 8;
/* seek and rewrite the header */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- mdat_pos, GST_CLOCK_TIME_NONE, 0);
- gst_pad_push_event (qtmux->srcpad, event);
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = mdat_pos;
+ gst_pad_push_event (qtmux->srcpad, gst_event_new_segment (&segment));
if (large_file) {
buf = gst_buffer_new_and_alloc (sizeof (guint64));
- GST_WRITE_UINT64_BE (GST_BUFFER_DATA (buf), mdat_size + 16);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_WRITE);
+ GST_WRITE_UINT64_BE (data, mdat_size + 16);
} else {
- guint8 *data;
-
buf = gst_buffer_new_and_alloc (16);
- data = GST_BUFFER_DATA (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_WRITE);
GST_WRITE_UINT32_BE (data, 8);
GST_WRITE_UINT32_LE (data + 4, FOURCC_free);
GST_WRITE_UINT32_BE (data + 8, mdat_size + 8);
GST_WRITE_UINT32_LE (data + 12, FOURCC_mdat);
}
+ gst_buffer_unmap (buf, data, size);
return gst_qt_mux_send_buffer (qtmux, buf, offset, FALSE);
}
GstStructure *structure;
GValue array = { 0 };
GValue value = { 0 };
- GstCaps *caps = GST_PAD_CAPS (mux->srcpad);
+ GstCaps *caps, *tcaps;
+
+ tcaps = gst_pad_get_current_caps (mux->srcpad);
+ caps = gst_caps_copy (tcaps);
+ gst_caps_unref (tcaps);
- caps = gst_caps_copy (GST_PAD_CAPS (mux->srcpad));
structure = gst_caps_get_structure (caps, 0);
g_value_init (&array, GST_TYPE_ARRAY);
{
GstFlowReturn ret = GST_FLOW_OK;
GstCaps *caps;
+ GstSegment segment;
GST_DEBUG_OBJECT (qtmux, "starting file");
gst_caps_unref (caps);
/* let downstream know we think in BYTES and expect to do seeking later on */
- gst_pad_push_event (qtmux->srcpad,
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ gst_pad_push_event (qtmux->srcpad, gst_event_new_segment (&segment));
/* initialize our moov recovery file */
GST_OBJECT_LOCK (qtmux);
}
if (qtmux->fragment_sequence) {
- GstEvent *event;
+ GstSegment segment;
if (qtmux->mfra) {
guint8 *data = NULL;
return GST_FLOW_OK;
}
-
timescale = qtmux->timescale;
/* only mvex duration is updated,
* mvhd should be consistent with empty moov
GST_DEBUG_OBJECT (qtmux, "rewriting moov with mvex duration %"
GST_TIME_FORMAT, GST_TIME_ARGS (first_ts));
/* seek and rewrite the header */
- event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
- qtmux->mdat_pos, GST_CLOCK_TIME_NONE, 0);
- gst_pad_push_event (qtmux->srcpad, event);
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = qtmux->mdat_pos;
+ gst_pad_push_event (qtmux->srcpad, gst_event_new_segment (&segment));
/* no need to seek back */
return gst_qt_mux_send_moov (qtmux, NULL, FALSE);
}
pad->traf = NULL;
atom_moof_copy_data (moof, &data, &size, &offset);
buffer = _gst_buffer_new_take_data (data, offset);
- GST_LOG_OBJECT (qtmux, "writing moof size %d", GST_BUFFER_SIZE (buffer));
+ GST_LOG_OBJECT (qtmux, "writing moof size %d",
+ gst_buffer_get_size (buffer));
ret = gst_qt_mux_send_buffer (qtmux, buffer, &qtmux->header_size, FALSE);
/* and actual data */
total_size = 0;
for (i = 0; i < atom_array_get_len (&pad->fragment_buffers); i++) {
total_size +=
- GST_BUFFER_SIZE (atom_array_index (&pad->fragment_buffers, i));
+ gst_buffer_get_size (atom_array_index (&pad->fragment_buffers, i));
}
GST_LOG_OBJECT (qtmux, "writing %d buffers, total_size %d",
buf = pad->buf_entries[pad->buf_head];
pad->buf_entries[pad->buf_head++] = NULL;
pad->buf_head %= wrap;
- buf = gst_buffer_make_metadata_writable (buf);
+ buf = gst_buffer_make_writable (buf);
/* track original ts (= pts ?) for later */
GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_TIMESTAMP (buf);
GST_BUFFER_TIMESTAMP (buf) = ts;
buffer (= chunk)), but can also be fixed-packet-size codecs like ADPCM
*/
sample_size = pad->sample_size;
- if (GST_BUFFER_SIZE (last_buf) % sample_size != 0)
+ if (gst_buffer_get_size (last_buf) % sample_size != 0)
goto fragmented_sample;
/* note: qt raw audio storage warps it implicitly into a timewise
* perfect stream, discarding buffer times */
nsamples = gst_util_uint64_scale_round (GST_BUFFER_DURATION (last_buf),
atom_trak_get_timescale (pad->trak), GST_SECOND);
} else {
- nsamples = GST_BUFFER_SIZE (last_buf) / sample_size;
+ nsamples = gst_buffer_get_size (last_buf) / sample_size;
}
duration = GST_BUFFER_DURATION (last_buf) / nsamples;
pad->last_dts += duration * nsamples;
} else {
nsamples = 1;
- sample_size = GST_BUFFER_SIZE (last_buf);
+ sample_size = gst_buffer_get_size (last_buf);
if (pad->have_dts) {
gint64 scaled_dts;
pad->last_dts = GST_BUFFER_OFFSET_END (last_buf);
GST_LOG_OBJECT (qtmux, "selected pad %s with time %" GST_TIME_FORMAT,
GST_PAD_NAME (best_pad->collect.pad), GST_TIME_ARGS (best_time));
buf = gst_collect_pads_pop (pads, &best_pad->collect);
- buf = gst_buffer_make_metadata_writable (buf);
+ buf = gst_buffer_make_writable (buf);
GST_BUFFER_TIMESTAMP (buf) = best_time;
ret = gst_qt_mux_add_buffer (qtmux, best_pad, buf);
} else {
AtomInfo *ext_atom = NULL;
gint constant_size = 0;
const gchar *stream_format;
- GstCaps *current_caps = NULL;
/* find stream data */
qtpad = (GstQTPad *) gst_pad_get_element_private (pad);
* the old caps are a subset of the new one (this means upstream
* added more info to the caps, as both should be 'fixed' caps) */
if (qtpad->fourcc) {
-    g_object_get (pad, "caps", &current_caps, NULL);
+ GstCaps *current_caps;
+
+ current_caps = gst_pad_get_current_caps (pad);
g_assert (caps != NULL);
if (!gst_qtmux_caps_is_subset_full (qtmux, current_caps, caps)) {
+ gst_caps_unref (current_caps);
goto refuse_renegotiation;
}
GST_DEBUG_OBJECT (qtmux,
"pad %s accepted renegotiation to %" GST_PTR_FORMAT " from %"
- GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, GST_PAD_CAPS (pad));
+ GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, current_caps);
+ gst_caps_unref (current_caps);
}
GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
"assuming 'raw'");
}
- if (!codec_data || GST_BUFFER_SIZE (codec_data) < 2)
+ if (!codec_data || gst_buffer_get_size ((GstBuffer *) codec_data) < 2)
GST_WARNING_OBJECT (qtmux, "no (valid) codec_data for AAC audio");
else {
- guint8 profile = GST_READ_UINT8 (GST_BUFFER_DATA (codec_data));
+ guint8 profile;
+ gst_buffer_extract ((GstBuffer *) codec_data, 0, &profile, 1);
/* warn if not Low Complexity profile */
profile >>= 3;
if (profile != 2)
} else if (strcmp (mimetype, "audio/x-alac") == 0) {
GstBuffer *codec_config;
gint len;
+ guint8 *data;
+ gsize size;
entry.fourcc = FOURCC_alac;
+ data = gst_buffer_map ((GstBuffer *) codec_data, &size, NULL, GST_MAP_READ);
/* let's check if codec data already comes with 'alac' atom prefix */
- if (!codec_data || (len = GST_BUFFER_SIZE (codec_data)) < 28) {
+ if (!codec_data || (len = size) < 28) {
GST_DEBUG_OBJECT (qtmux, "broken caps, codec data missing");
+ gst_buffer_unmap ((GstBuffer *) codec_data, data, size);
goto refuse_caps;
}
- if (GST_READ_UINT32_LE (GST_BUFFER_DATA (codec_data) + 4) == FOURCC_alac) {
+ if (GST_READ_UINT32_LE (data + 4) == FOURCC_alac) {
len -= 8;
- codec_config = gst_buffer_create_sub ((GstBuffer *) codec_data, 8, len);
+ codec_config =
+ gst_buffer_copy_region ((GstBuffer *) codec_data, 0, 8, len);
} else {
codec_config = gst_buffer_ref ((GstBuffer *) codec_data);
}
+ gst_buffer_unmap ((GstBuffer *) codec_data, data, size);
if (len != 28) {
/* does not look good, but perhaps some trailing unneeded stuff */
GST_WARNING_OBJECT (qtmux, "unexpected codec-data size, possibly broken");
else
ext_atom = build_codec_data_extension (FOURCC_alac, codec_config);
/* set some more info */
+ data = gst_buffer_map (codec_config, &size, NULL, GST_MAP_READ);
entry.bytes_per_sample = 2;
- entry.samples_per_packet =
- GST_READ_UINT32_BE (GST_BUFFER_DATA (codec_config) + 4);
+ entry.samples_per_packet = GST_READ_UINT32_BE (data + 4);
+ gst_buffer_unmap (codec_config, data, size);
gst_buffer_unref (codec_config);
}
GList *ext_atom_list = NULL;
gboolean sync = FALSE;
int par_num, par_den;
- GstCaps *current_caps = NULL;
/* find stream data */
qtpad = (GstQTPad *) gst_pad_get_element_private (pad);
* the old caps are a subset of the new one (this means upstream
* added more info to the caps, as both should be 'fixed' caps) */
if (qtpad->fourcc) {
-    g_object_get (pad, "caps", &current_caps, NULL);
+ GstCaps *current_caps;
+
+ current_caps = gst_pad_get_current_caps (pad);
g_assert (caps != NULL);
if (!gst_qtmux_caps_is_subset_full (qtmux, current_caps, caps)) {
+ gst_caps_unref (current_caps);
goto refuse_renegotiation;
}
GST_DEBUG_OBJECT (qtmux,
"pad %s accepted renegotiation to %" GST_PTR_FORMAT " from %"
- GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, GST_PAD_CAPS (pad));
+ GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, current_caps);
+ gst_caps_unref (current_caps);
}
GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
sync = TRUE;
/* now map onto a fourcc, and some extra properties */
- if (strcmp (mimetype, "video/x-raw-rgb") == 0) {
- gint bpp;
+ if (strcmp (mimetype, "video/x-raw") == 0) {
+ const gchar *format;
+ GstVideoFormat fmt;
- entry.fourcc = FOURCC_raw_;
- gst_structure_get_int (structure, "bpp", &bpp);
- entry.depth = bpp;
- sync = FALSE;
- } else if (strcmp (mimetype, "video/x-raw-yuv") == 0) {
- guint32 format = 0;
+ format = gst_structure_get_string (structure, "format");
+ fmt = gst_video_format_from_string (format);
- sync = FALSE;
- gst_structure_get_fourcc (structure, "format", &format);
- switch (format) {
- case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
+ switch (fmt) {
+ case GST_VIDEO_FORMAT_UYVY:
if (depth == -1)
depth = 24;
entry.fourcc = FOURCC_2vuy;
entry.depth = depth;
+ sync = FALSE;
+ break;
+ default:
+ if (gst_video_format_is_rgb (fmt)) {
+ entry.fourcc = FOURCC_raw_;
+ entry.depth = gst_video_format_get_pixel_stride (fmt, 0) * 8;
+ sync = FALSE;
+ }
break;
}
} else if (strcmp (mimetype, "video/x-h263") == 0) {
refuse_renegotiation:
{
GST_WARNING_OBJECT (qtmux,
- "pad %s refused renegotiation to %" GST_PTR_FORMAT " from %"
- GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, GST_PAD_CAPS (pad));
+ "pad %s refused renegotiation to %" GST_PTR_FORMAT, GST_PAD_NAME (pad),
+ caps);
gst_object_unref (qtmux);
return FALSE;
}
guint32 avg_bitrate = 0, max_bitrate = 0;
qtmux = GST_QT_MUX_CAST (gst_pad_get_parent (pad));
+
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+ GstQTPad *collect_pad;
+
+ gst_event_parse_caps (event, &caps);
+
+ /* find stream data */
+ collect_pad = (GstQTPad *) gst_pad_get_element_private (pad);
+ g_assert (collect_pad);
+ g_assert (collect_pad->set_caps);
+
+ collect_pad->set_caps (pad, caps);
+ break;
+ }
case GST_EVENT_TAG:{
GstTagList *list;
GstTagSetter *setter = GST_TAG_SETTER (qtmux);
static GstPad *
gst_qt_mux_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * req_name)
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
GstQTMux *qtmux = GST_QT_MUX_CAST (element);
GstPad *newpad;
gboolean audio;
gchar *name;
+ gint pad_id;
if (templ->direction != GST_PAD_SINK)
goto wrong_direction;
if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) {
audio = TRUE;
- name = g_strdup_printf ("audio_%02d", qtmux->audio_pads++);
+ if (req_name != NULL && sscanf (req_name, "audio_%02d", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("audio_%02d", qtmux->audio_pads++);
+ }
} else if (templ == gst_element_class_get_pad_template (klass, "video_%d")) {
audio = FALSE;
- name = g_strdup_printf ("video_%02d", qtmux->video_pads++);
+ if (req_name != NULL && sscanf (req_name, "video_%02d", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("video_%02d", qtmux->video_pads++);
+ }
} else
goto wrong_template;
/* set up pad functions */
if (audio)
- gst_pad_set_setcaps_function (newpad,
- GST_DEBUG_FUNCPTR (gst_qt_mux_audio_sink_set_caps));
+ collect_pad->set_caps = GST_DEBUG_FUNCPTR (gst_qt_mux_audio_sink_set_caps);
else
- gst_pad_set_setcaps_function (newpad,
- GST_DEBUG_FUNCPTR (gst_qt_mux_video_sink_set_caps));
+ collect_pad->set_caps = GST_DEBUG_FUNCPTR (gst_qt_mux_video_sink_set_caps);
/* FIXME: hacked way to override/extend the event function of
* GstCollectPads; because it sets its own event function giving the
/* if nothing is set, it won't be called */
GstQTPadPrepareBufferFunc prepare_buf_func;
+ gboolean (*set_caps) (GstPad * pad, GstCaps * caps);
};
typedef enum _GstQTMuxState
"GstQTMux",
GST_STATIC_CAPS ("video/quicktime, variant = (string) apple; "
"video/quicktime"),
- GST_STATIC_CAPS ("video/x-raw-rgb, "
- COMMON_VIDEO_CAPS "; "
- "video/x-raw-yuv, "
- "format = (fourcc) UYVY, "
+ GST_STATIC_CAPS ("video/x-raw, "
+ "format = (string) { RGB, UYVY }, "
COMMON_VIDEO_CAPS "; "
MPEG4V_CAPS "; "
H263_CAPS "; "
break;
}
case GST_QT_MUX_FORMAT_MJ2:
+ {
+ guint8 *bdata;
+
major = FOURCC_mjp2;
comp = mjp2_brands;
version = 0;
prefix = gst_buffer_new_and_alloc (sizeof (mjp2_prefix));
- memcpy (GST_BUFFER_DATA (prefix), mjp2_prefix, GST_BUFFER_SIZE (prefix));
+ bdata = gst_buffer_map (prefix, NULL, NULL, GST_MAP_WRITE);
+ memcpy (bdata, mjp2_prefix, sizeof (mjp2_prefix));
+ gst_buffer_unmap (prefix, bdata, -1);
break;
+ }
default:
g_assert_not_reached ();
break;
"encoding-name = (string) { \"X-QT\", \"X-QUICKTIME\" }")
);
-GST_BOILERPLATE (GstRtpXQTDepay, gst_rtp_xqt_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_xqt_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpXQTDepay, gst_rtp_xqt_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_xqt_depay_finalize (GObject * object);
static GstStateChangeReturn gst_rtp_xqt_depay_change_state (GstElement *
element, GstStateChange transition);
-static void
-gst_rtp_xqt_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_xqt_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_xqt_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP packet depayloader",
- "Codec/Depayloader/Network",
- "Extracts Quicktime audio/video from RTP packets",
- "Wim Taymans <wim@fluendo.com>");
-}
static void
gst_rtp_xqt_depay_class_init (GstRtpXQTDepayClass * klass)
GST_DEBUG_CATEGORY_INIT (rtpxqtdepay_debug, "rtpxqtdepay", 0,
"QT Media RTP Depayloader");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_xqt_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_xqt_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP packet depayloader", "Codec/Depayloader/Network",
+ "Extracts Quicktime audio/video from RTP packets",
+ "Wim Taymans <wim@fluendo.com>");
}
static void
-gst_rtp_xqt_depay_init (GstRtpXQTDepay * rtpxqtdepay,
- GstRtpXQTDepayClass * klass)
+gst_rtp_xqt_depay_init (GstRtpXQTDepay * rtpxqtdepay)
{
rtpxqtdepay->adapter = gst_adapter_new ();
}
GstBuffer *buf;
gint size;
GstCaps *caps;
+ guint8 *bdata;
GST_DEBUG_OBJECT (rtpxqtdepay, "found avcC codec_data in sd, %u",
chlen);
size = len - 8;
buf = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buf), data + 8, size);
+ bdata = gst_buffer_map (buf, NULL, NULL, GST_MAP_WRITE);
+ memcpy (bdata, data + 8, size);
+ gst_buffer_unmap (buf, bdata, -1);
caps = gst_caps_new_simple ("video/x-h264",
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
gst_rtp_xqt_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstRtpXQTDepay *rtpxqtdepay;
- GstBuffer *outbuf;
+ GstBuffer *outbuf = NULL;
gboolean m;
+ GstRTPBuffer rtp;
rtpxqtdepay = GST_RTP_XQT_DEPAY (depayload);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
if (!gst_rtp_buffer_validate (buf))
goto bad_packet;
GST_DEBUG_OBJECT (rtpxqtdepay, "we need resync");
}
- m = gst_rtp_buffer_get_marker (buf);
+ m = gst_rtp_buffer_get_marker (&rtp);
GST_LOG_OBJECT (rtpxqtdepay, "marker: %d", m);
{
guint8 *payload;
guint8 ver, pck;
gboolean s, q, l, d;
+ guint8 *bdata;
+ gsize bsize;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
{
/* multiple samples per packet. */
outbuf = gst_buffer_new_and_alloc (payload_len);
- memcpy (GST_BUFFER_DATA (outbuf), payload, payload_len);
- return outbuf;
+ bdata = gst_buffer_map (outbuf, &bsize, NULL, GST_MAP_WRITE);
+ memcpy (bdata, payload, payload_len);
+ gst_buffer_unmap (outbuf, bdata, bsize);
+
+ goto done;
}
case 2:
{
slen = payload_len;
outbuf = gst_buffer_new_and_alloc (slen);
- memcpy (GST_BUFFER_DATA (outbuf), payload, slen);
+ bdata = gst_buffer_map (outbuf, &bsize, NULL, GST_MAP_WRITE);
+ memcpy (bdata, payload, slen);
+ gst_buffer_unmap (outbuf, bdata, bsize);
if (!s)
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
{
/* one sample per packet, use adapter to combine based on marker bit. */
outbuf = gst_buffer_new_and_alloc (payload_len);
- memcpy (GST_BUFFER_DATA (outbuf), payload, payload_len);
+ bdata = gst_buffer_map (outbuf, &bsize, NULL, GST_MAP_WRITE);
+ memcpy (bdata, payload, payload_len);
+ gst_buffer_unmap (outbuf, bdata, bsize);
gst_adapter_push (rtpxqtdepay->adapter, outbuf);
GST_DEBUG_OBJECT (rtpxqtdepay,
"gst_rtp_xqt_depay_chain: pushing buffer of size %u", avail);
- return outbuf;
+ goto done;
}
}
}
done:
- return NULL;
+ gst_rtp_buffer_unmap (&rtp);
+ return outbuf;
bad_packet:
{
GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
("Packet did not validate."), (NULL));
- return NULL;
+ goto done;
}
need_resync:
{
GST_DEBUG_OBJECT (rtpxqtdepay, "waiting for marker");
- return NULL;
+ goto done;
}
wrong_version:
{
GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
("Unknown payload version."), (NULL));
- return NULL;
+ goto done;
}
pck_reserved:
{
GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
("PCK reserved 0."), (NULL));
- return NULL;
+ goto done;
}
wrong_length:
{
GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
("Wrong payload length."), (NULL));
- return NULL;
+ goto done;
}
unknown_format:
{
GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
("Unknown payload format."), (NULL));
- return NULL;
+ goto done;
}
}
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
-GST_BOILERPLATE (GstQTDemux, gst_qtdemux, GstQTDemux, GST_TYPE_ELEMENT);
+#define gst_qtdemux_parent_class parent_class
+G_DEFINE_TYPE (GstQTDemux, gst_qtdemux, GST_TYPE_ELEMENT);
static void gst_qtdemux_dispose (GObject * object);
static GstFlowReturn qtdemux_expose_streams (GstQTDemux * qtdemux);
static void
-gst_qtdemux_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_qtdemux_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_qtdemux_videosrc_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_qtdemux_audiosrc_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_qtdemux_subsrc_template));
- gst_element_class_set_details_simple (element_class, "QuickTime demuxer",
- "Codec/Demuxer",
- "Demultiplex a QuickTime file into audio and video streams",
- "David Schleef <ds@schleef.org>, Wim Taymans <wim@fluendo.com>");
-
- GST_DEBUG_CATEGORY_INIT (qtdemux_debug, "qtdemux", 0, "qtdemux plugin");
-}
-
-static void
gst_qtdemux_class_init (GstQTDemuxClass * klass)
{
GObjectClass *gobject_class;
gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_qtdemux_get_index);
gst_tag_register_musicbrainz_tags ();
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_qtdemux_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_qtdemux_videosrc_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_qtdemux_audiosrc_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_qtdemux_subsrc_template));
+ gst_element_class_set_details_simple (gstelement_class, "QuickTime demuxer",
+ "Codec/Demuxer",
+ "Demultiplex a QuickTime file into audio and video streams",
+ "David Schleef <ds@schleef.org>, Wim Taymans <wim@fluendo.com>");
+
+ GST_DEBUG_CATEGORY_INIT (qtdemux_debug, "qtdemux", 0, "qtdemux plugin");
+
}
static void
-gst_qtdemux_init (GstQTDemux * qtdemux, GstQTDemuxClass * klass)
+gst_qtdemux_init (GstQTDemux * qtdemux)
{
qtdemux->sinkpad =
gst_pad_new_from_static_template (&gst_qtdemux_sink_template, "sink");
}
}
+static void
+_gst_buffer_copy_into_mem (GstBuffer * dest, const guint8 * src,
+ gsize offset, gsize size)
+{
+ guint8 *bdata;
+ gsize bsize;
+
+ g_return_if_fail (gst_buffer_is_writable (dest));
+
+ bsize = gst_buffer_get_size (dest);
+ g_return_if_fail (bsize >= offset + size);
+
+ bdata = gst_buffer_map (dest, &bsize, NULL, GST_MAP_WRITE);
+ memcpy (bdata + offset, src, size);
+ gst_buffer_unmap (dest, bdata, bsize);
+}
+
+static GstBuffer *
+_gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
+{
+ GstBuffer *buf;
+
+ buf = gst_buffer_new ();
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (free_func ? 0 : GST_MEMORY_FLAG_READONLY,
+ mem, free_func, size, 0, size));
+
+ return buf;
+}
+
static GstFlowReturn
gst_qtdemux_pull_atom (GstQTDemux * qtdemux, guint64 offset, guint64 size,
GstBuffer ** buf)
{
GstFlowReturn flow;
+ guint8 *bdata;
+ gsize bsize;
if (G_UNLIKELY (size == 0)) {
GstFlowReturn ret;
if (ret != GST_FLOW_OK)
return ret;
- size = QT_UINT32 (GST_BUFFER_DATA (tmp));
+ bdata = gst_buffer_map (tmp, &bsize, NULL, GST_MAP_READ);
+ size = QT_UINT32 (bdata);
GST_DEBUG_OBJECT (qtdemux, "size 0x%08" G_GINT64_MODIFIER "x", size);
+ gst_buffer_unmap (tmp, bdata, bsize);
gst_buffer_unref (tmp);
}
if (G_UNLIKELY (flow != GST_FLOW_OK))
return flow;
+ bsize = gst_buffer_get_size (*buf);
/* Catch short reads - we don't want any partial atoms */
- if (G_UNLIKELY (GST_BUFFER_SIZE (*buf) < size)) {
+ if (G_UNLIKELY (bsize < size)) {
GST_WARNING_OBJECT (qtdemux, "short read: %u < %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (*buf), size);
+ bsize, size);
gst_buffer_unref (*buf);
*buf = NULL;
return GST_FLOW_UNEXPECTED;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_POSITION:
- if (GST_CLOCK_TIME_IS_VALID (qtdemux->segment.last_stop)) {
+ if (GST_CLOCK_TIME_IS_VALID (qtdemux->segment.position)) {
gst_query_set_position (query, GST_FORMAT_TIME,
- qtdemux->segment.last_stop);
+ qtdemux->segment.position);
res = TRUE;
}
break;
gint64 desired_offset;
gint n;
- desired_offset = segment->last_stop;
+ desired_offset = segment->position;
GST_DEBUG_OBJECT (qtdemux, "seeking to %" GST_TIME_FORMAT,
GST_TIME_ARGS (desired_offset));
stream->segment_index = -1;
stream->last_ret = GST_FLOW_OK;
stream->sent_eos = FALSE;
+
+ if (segment->flags & GST_SEEK_FLAG_FLUSH)
+ gst_segment_init (&stream->segment, GST_FORMAT_TIME);
}
- segment->last_stop = desired_offset;
+ segment->position = desired_offset;
segment->time = desired_offset;
/* we stop at the end */
if (event) {
/* configure the segment with the seek variables */
GST_DEBUG_OBJECT (qtdemux, "configuring seek");
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
}
/* prepare for streaming again */
if (flush) {
- gst_pad_push_event (qtdemux->sinkpad, gst_event_new_flush_stop ());
- gst_qtdemux_push_event (qtdemux, gst_event_new_flush_stop ());
- } else if (qtdemux->segment_running) {
- /* we are running the current segment and doing a non-flushing seek,
- * close the segment first based on the last_stop. */
- GST_DEBUG_OBJECT (qtdemux, "closing running segment %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, qtdemux->segment.start,
- qtdemux->segment.last_stop);
-
- if (qtdemux->segment.rate >= 0) {
- /* FIXME, rate is the product of the global rate and the (quicktime)
- * segment rate. */
- qtdemux->pending_newsegment = gst_event_new_new_segment (TRUE,
- qtdemux->segment.rate, qtdemux->segment.format,
- qtdemux->segment.start, qtdemux->segment.last_stop,
- qtdemux->segment.time);
- } else { /* For Reverse Playback */
- guint64 stop;
-
- if ((stop = qtdemux->segment.stop) == -1)
- stop = qtdemux->segment.duration;
- /* for reverse playback, we played from stop to last_stop. */
- qtdemux->pending_newsegment = gst_event_new_new_segment (TRUE,
- qtdemux->segment.rate, qtdemux->segment.format,
- qtdemux->segment.last_stop, stop, qtdemux->segment.last_stop);
- }
+ gst_pad_push_event (qtdemux->sinkpad, gst_event_new_flush_stop (TRUE));
+ gst_qtdemux_push_event (qtdemux, gst_event_new_flush_stop (TRUE));
}
/* commit the new segment */
if (qtdemux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
gst_message_new_segment_start (GST_OBJECT_CAST (qtdemux),
- qtdemux->segment.format, qtdemux->segment.last_stop));
+ qtdemux->segment.format, qtdemux->segment.position));
}
- /* restart streaming, NEWSEGMENT will be sent from the streaming
- * thread. */
- qtdemux->segment_running = TRUE;
+ /* restart streaming, NEWSEGMENT will be sent from the streaming thread. */
for (i = 0; i < qtdemux->n_streams; i++)
qtdemux->streams[i]->last_ret = GST_FLOW_OK;
GST_LOG_OBJECT (demux, "handling %s event", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time, offset = 0;
+ gint64 offset = 0;
QtDemuxStream *stream;
gint idx;
- gboolean update;
GstSegment segment;
/* some debug output */
- gst_segment_init (&segment, GST_FORMAT_UNDEFINED);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
- gst_segment_set_newsegment_full (&segment, update, rate, arate, format,
- start, stop, time);
- GST_DEBUG_OBJECT (demux,
- "received format %d newsegment %" GST_SEGMENT_FORMAT, format,
+ gst_event_copy_segment (event, &segment);
+ GST_DEBUG_OBJECT (demux, "received newsegment %" GST_SEGMENT_FORMAT,
&segment);
/* chain will send initial newsegment after pads have been added */
}
/* we only expect a BYTE segment, e.g. following a seek */
- if (format == GST_FORMAT_BYTES) {
- if (start > 0) {
+ if (segment.format == GST_FORMAT_BYTES) {
+ if (GST_CLOCK_TIME_IS_VALID (segment.start)) {
gint64 requested_seek_time;
guint64 seek_offset;
- offset = start;
+ offset = segment.start;
GST_OBJECT_LOCK (demux);
requested_seek_time = demux->requested_seek_time;
GST_OBJECT_UNLOCK (demux);
if (offset == seek_offset) {
- start = requested_seek_time;
+ segment.start = requested_seek_time;
} else {
- gst_qtdemux_find_sample (demux, start, TRUE, FALSE, NULL, NULL,
- &start);
- start = MAX (start, 0);
+ gst_qtdemux_find_sample (demux, segment.start, TRUE, FALSE, NULL,
+ NULL, (gint64 *) & segment.start);
+ segment.start = MAX (segment.start, 0);
}
}
- if (stop > 0) {
- gst_qtdemux_find_sample (demux, stop, FALSE, FALSE, NULL, NULL,
- &stop);
+ if (GST_CLOCK_TIME_IS_VALID (segment.stop)) {
+ gst_qtdemux_find_sample (demux, segment.stop, FALSE, FALSE, NULL,
+ NULL, (gint64 *) & segment.stop);
/* keyframe seeking should already arrange for start >= stop,
* but make sure in other rare cases */
- stop = MAX (stop, start);
+ segment.stop = MAX (segment.stop, segment.start);
}
} else {
GST_DEBUG_OBJECT (demux, "unsupported segment format, ignoring");
}
/* accept upstream's notion of segment and distribute along */
- gst_segment_set_newsegment_full (&demux->segment, update, rate, arate,
- GST_FORMAT_TIME, start, stop, start);
- GST_DEBUG_OBJECT (demux, "Pushing newseg update %d, rate %g, "
- "applied rate %g, format %d, start %" GST_TIME_FORMAT ", "
- "stop %" GST_TIME_FORMAT, update, rate, arate, GST_FORMAT_TIME,
- GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+ segment.time = segment.start;
+ segment.duration = demux->segment.duration;
+ segment.base = gst_segment_to_running_time (&demux->segment,
+ GST_FORMAT_TIME, demux->segment.position);
- gst_qtdemux_push_event (demux,
- gst_event_new_new_segment_full (update, rate, arate, GST_FORMAT_TIME,
- start, stop, start));
+ gst_segment_copy_into (&segment, &demux->segment);
+ GST_DEBUG_OBJECT (demux, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);
+ gst_qtdemux_push_event (demux, gst_event_new_segment (&segment));
/* clear leftover in current segment, if any */
gst_adapter_clear (demux->adapter);
case GST_EVENT_FLUSH_STOP:
{
gint i;
+ GstClockTime dur;
/* clean up, force EOS if no more info follows */
gst_adapter_clear (demux->adapter);
demux->streams[i]->last_ret = GST_FLOW_OK;
demux->streams[i]->sent_eos = FALSE;
}
+ dur = demux->segment.duration;
+ gst_segment_init (&demux->segment, GST_FORMAT_TIME);
+ demux->segment.duration = dur;
break;
}
case GST_EVENT_EOS:
GST_DEBUG_OBJECT (qtdemux, "major brand: %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (qtdemux->major_brand));
buf = qtdemux->comp_brands = gst_buffer_new_and_alloc (length - 16);
- memcpy (GST_BUFFER_DATA (buf), buffer + 16, GST_BUFFER_SIZE (buf));
+ _gst_buffer_copy_into_mem (buf, buffer + 16, 0, length - 16);
}
}
GstBuffer *buf;
GstTagList *taglist;
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = (guint8 *) buffer + offset + 16;
- GST_BUFFER_SIZE (buf) = length - offset - 16;
-
+ buf = _gst_buffer_new_wrapped ((guint8 *) buffer + offset + 16,
+ length - offset - 16, NULL);
taglist = gst_tag_list_from_xmp_buffer (buf);
gst_buffer_unref (buf);
GstBuffer *buf = NULL;
GstFlowReturn ret = GST_FLOW_OK;
guint64 cur_offset = qtdemux->offset;
+ guint8 *data;
+ gsize size;
ret = gst_pad_pull_range (qtdemux->sinkpad, cur_offset, 16, &buf);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto beach;
- if (G_LIKELY (GST_BUFFER_SIZE (buf) >= 8))
- extract_initial_length_and_fourcc (GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf), &length, &fourcc);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ if (G_LIKELY (size >= 8))
+ extract_initial_length_and_fourcc (data, size, &length, &fourcc);
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
/* maybe we already got most we needed, so only consider this eof */
ret = gst_pad_pull_range (qtdemux->sinkpad, cur_offset, length, &moov);
if (ret != GST_FLOW_OK)
goto beach;
- if (length != GST_BUFFER_SIZE (moov)) {
+ data = gst_buffer_map (moov, &size, NULL, GST_MAP_READ);
+ if (length != size) {
/* Some files have a 'moov' atom at the end of the file which contains
* a terminal 'free' atom where the body of the atom is missing.
* Check for, and permit, this special case.
*/
- if (GST_BUFFER_SIZE (moov) >= 8) {
- guint8 *final_data = GST_BUFFER_DATA (moov) +
- (GST_BUFFER_SIZE (moov) - 8);
+ if (size >= 8) {
+ guint8 *final_data = data + (size - 8);
guint32 final_length = QT_UINT32 (final_data);
guint32 final_fourcc = QT_FOURCC (final_data + 4);
- if (final_fourcc == FOURCC_free &&
- GST_BUFFER_SIZE (moov) + final_length - 8 == length) {
+ gst_buffer_unmap (moov, data, size);
+ if (final_fourcc == FOURCC_free && size + final_length - 8 == length) {
/* Ok, we've found that special case. Allocate a new buffer with
* that free atom actually present. */
GstBuffer *newmoov = gst_buffer_new_and_alloc (length);
- gst_buffer_copy_metadata (newmoov, moov,
- GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
- GST_BUFFER_COPY_CAPS);
- memcpy (GST_BUFFER_DATA (newmoov), GST_BUFFER_DATA (moov),
- GST_BUFFER_SIZE (moov));
- memset (GST_BUFFER_DATA (newmoov) + GST_BUFFER_SIZE (moov), 0,
- final_length - 8);
+ gst_buffer_copy_into (newmoov, moov, 0, 0, size);
+ data = gst_buffer_map (newmoov, &size, NULL, GST_MAP_WRITE);
+ memset (data + length - final_length + 8, 0, final_length - 8);
gst_buffer_unref (moov);
moov = newmoov;
}
}
}
- if (length != GST_BUFFER_SIZE (moov)) {
+ if (length != size) {
GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
(_("This file is incomplete and cannot be played.")),
("We got less than expected (received %u, wanted %u, offset %"
- G_GUINT64_FORMAT ")",
- GST_BUFFER_SIZE (moov), (guint) length, cur_offset));
+ G_GUINT64_FORMAT ")", size, (guint) length, cur_offset));
+ gst_buffer_unmap (moov, data, size);
gst_buffer_unref (moov);
ret = GST_FLOW_ERROR;
goto beach;
}
qtdemux->offset += length;
- qtdemux_parse_moov (qtdemux, GST_BUFFER_DATA (moov), length);
+ qtdemux_parse_moov (qtdemux, data, length);
qtdemux_node_dump (qtdemux, qtdemux->moov_node);
qtdemux_parse_tree (qtdemux);
g_node_destroy (qtdemux->moov_node);
+ gst_buffer_unmap (moov, data, size);
gst_buffer_unref (moov);
qtdemux->moov_node = NULL;
qtdemux->got_moov = TRUE;
if (ret != GST_FLOW_OK)
goto beach;
qtdemux->offset += length;
- qtdemux_parse_ftyp (qtdemux, GST_BUFFER_DATA (ftyp),
- GST_BUFFER_SIZE (ftyp));
+ data = gst_buffer_map (ftyp, &size, NULL, GST_MAP_READ);
+ qtdemux_parse_ftyp (qtdemux, data, size);
+ gst_buffer_unmap (ftyp, data, size);
gst_buffer_unref (ftyp);
break;
}
if (ret != GST_FLOW_OK)
goto beach;
qtdemux->offset += length;
- qtdemux_parse_uuid (qtdemux, GST_BUFFER_DATA (uuid),
- GST_BUFFER_SIZE (uuid));
+ data = gst_buffer_map (uuid, &size, NULL, GST_MAP_READ);
+ qtdemux_parse_uuid (qtdemux, data, size);
+ gst_buffer_unmap (uuid, data, size);
gst_buffer_unref (uuid);
break;
}
ret = gst_qtdemux_pull_atom (qtdemux, cur_offset, length, &unknown);
if (ret != GST_FLOW_OK)
goto beach;
- GST_MEMDUMP ("Unknown tag", GST_BUFFER_DATA (unknown),
- GST_BUFFER_SIZE (unknown));
+ data = gst_buffer_map (unknown, &size, NULL, GST_MAP_READ);
+ GST_MEMDUMP ("Unknown tag", data, size);
+ gst_buffer_unmap (unknown, data, size);
gst_buffer_unref (unknown);
qtdemux->offset += length;
break;
QtDemuxStream *str = qtdemux->streams[n];
seg_idx = gst_qtdemux_find_segment (qtdemux, str,
- qtdemux->segment.last_stop);
+ qtdemux->segment.position);
/* segment not found, continue with normal flow */
if (seg_idx == -1)
k_index, GST_TIME_ARGS (k_pos));
/* Set last_stop with the keyframe timestamp we pushed of that stream */
- gst_segment_set_last_stop (&qtdemux->segment, GST_FORMAT_TIME, last_stop);
+ qtdemux->segment.position = last_stop;
GST_DEBUG_OBJECT (qtdemux, "last_stop now is %" GST_TIME_FORMAT,
GST_TIME_ARGS (last_stop));
/* update the segment values used for clipping */
gst_segment_init (&stream->segment, GST_FORMAT_TIME);
- gst_segment_set_newsegment (&stream->segment, FALSE, rate, GST_FORMAT_TIME,
- start, stop, time);
+ /* accumulate previous segments */
+ if (GST_CLOCK_TIME_IS_VALID (stream->segment.stop))
+ stream->segment.base += (stream->segment.stop - stream->segment.start) /
+ ABS (stream->segment.rate);
+ stream->segment.rate = rate;
+ stream->segment.start = start;
+ stream->segment.stop = stop;
+ stream->segment.time = time;
/* now prepare and send the segment */
if (stream->pad) {
- event = gst_event_new_new_segment (FALSE, rate, GST_FORMAT_TIME,
- start, stop, time);
+ event = gst_event_new_segment (&stream->segment);
gst_pad_push_event (stream->pad, event);
/* assume we can send more data now */
stream->last_ret = GST_FLOW_OK;
end_time = stream->segments[stream->n_segments - 1].stop_time;
GST_LOG_OBJECT (demux, "current position: %" GST_TIME_FORMAT
", stream end: %" GST_TIME_FORMAT,
- GST_TIME_ARGS (demux->segment.last_stop), GST_TIME_ARGS (end_time));
- if (end_time + 2 * GST_SECOND < demux->segment.last_stop) {
+ GST_TIME_ARGS (demux->segment.position), GST_TIME_ARGS (end_time));
+ if (end_time + 2 * GST_SECOND < demux->segment.position) {
GST_DEBUG_OBJECT (demux, "sending EOS for stream %s",
GST_PAD_NAME (stream->pad));
stream->sent_eos = TRUE;
gst_qtdemux_clip_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
GstBuffer * buf)
{
- gint64 start, stop, cstart, cstop, diff;
+ guint64 start, stop, cstart, cstop, diff;
GstClockTime timestamp = GST_CLOCK_TIME_NONE, duration = GST_CLOCK_TIME_NONE;
- guint8 *data;
guint size;
gint num_rate, denom_rate;
gint frame_size;
gboolean clip_data;
+ guint offset;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ size = gst_buffer_get_size (buf);
/* depending on the type, setup the clip parameters */
if (stream->subtype == FOURCC_soun) {
"clipping start to %" GST_TIME_FORMAT " %"
G_GUINT64_FORMAT " bytes", GST_TIME_ARGS (cstart), diff);
- data += diff;
+ offset = diff;
size -= diff;
}
}
}
}
+ gst_buffer_resize (buf, offset, size);
GST_BUFFER_TIMESTAMP (buf) = timestamp;
GST_BUFFER_DURATION (buf) = duration;
- GST_BUFFER_SIZE (buf) = size;
- GST_BUFFER_DATA (buf) = data;
return buf;
GstBuffer * buf)
{
guint8 *data;
- guint size, nsize = 0;
+ guint nsize = 0;
+ gsize size;
gchar *str;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
-
/* not many cases for now */
if (G_UNLIKELY (stream->fourcc == FOURCC_mp4s)) {
/* send a one time dvd clut event */
/* takes care of UTF-8 validation or UTF-16 recognition,
* no other encoding expected */
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
str = gst_tag_freeform_string_to_utf8 ((gchar *) data + 2, nsize, NULL);
+ gst_buffer_unmap (buf, data, size);
if (str) {
gst_buffer_unref (buf);
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = GST_BUFFER_MALLOCDATA (buf) = (guint8 *) str;
- GST_BUFFER_SIZE (buf) = strlen (str);
+ buf = _gst_buffer_new_wrapped (str, strlen (str), g_free);
} else {
/* may be 0-size subtitle, which is also sent to keep pipeline going */
- GST_BUFFER_DATA (buf) = data + 2;
- GST_BUFFER_SIZE (buf) = nsize;
+ gst_buffer_resize (buf, 2, nsize);
}
/* FIXME ? convert optional subsequent style info to markup */
if (G_UNLIKELY (stream->fourcc == FOURCC_rtsp)) {
gchar *url;
+ guint8 *bdata;
+ gsize bsize;
- url = g_strndup ((gchar *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+ url = g_strndup ((gchar *) bdata, bsize);
+ gst_buffer_unmap (buf, bdata, bsize);
if (url != NULL && strlen (url) != 0) {
/* we have RTSP redirect now */
gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
/* position reporting */
if (qtdemux->segment.rate >= 0) {
- gst_segment_set_last_stop (&qtdemux->segment, GST_FORMAT_TIME, position);
+ qtdemux->segment.position = position;
gst_qtdemux_sync_streams (qtdemux);
}
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
stream->discont = FALSE;
}
- gst_buffer_set_caps (buffer, stream->caps);
gst_pad_push (stream->pad, buffer);
}
/* we're going to modify the metadata */
- buf = gst_buffer_make_metadata_writable (buf);
+ buf = gst_buffer_make_writable (buf);
if (G_UNLIKELY (stream->need_process))
buf = gst_qtdemux_process_buffer (qtdemux, stream, buf);
GST_BUFFER_OFFSET_END (buf) = -1;
if (G_UNLIKELY (stream->padding)) {
- GST_BUFFER_DATA (buf) += stream->padding;
- GST_BUFFER_SIZE (buf) -= stream->padding;
+ gst_buffer_resize (buf, stream->padding, -1);
}
if (G_UNLIKELY (qtdemux->element_index)) {
if (!keyframe)
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
- gst_buffer_set_caps (buf, stream->caps);
-
GST_LOG_OBJECT (qtdemux,
"Pushing buffer with time %" GST_TIME_FORMAT ", duration %"
GST_TIME_FORMAT " on pad %s", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
{
GST_ELEMENT_ERROR (qtdemux, STREAM, FAILED,
(NULL), ("streaming stopped, invalid state"));
- qtdemux->segment_running = FALSE;
gst_pad_pause_task (pad);
gst_qtdemux_push_event (qtdemux, gst_event_new_eos ());
goto done;
GST_LOG_OBJECT (qtdemux, "pausing task, reason %s", reason);
- qtdemux->segment_running = FALSE;
gst_pad_pause_task (pad);
/* fatal errors need special actions */
if (qtdemux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gint64 stop;
- /* FIXME: I am not sure this is the right fix. If the sinks are
- * supposed to detect the segment is complete and accumulate
- * automatically, it does not seem to work here. Need more work */
- qtdemux->segment_running = TRUE;
-
if ((stop = qtdemux->segment.stop) == -1)
stop = qtdemux->segment.duration;
guint32 fourcc;
guint64 size;
- data = gst_adapter_peek (demux->adapter, demux->neededbytes);
+ data = gst_adapter_map (demux->adapter, demux->neededbytes);
/* get fourcc/length, set neededbytes */
extract_initial_length_and_fourcc ((guint8 *) data, demux->neededbytes,
&size, &fourcc);
+ gst_adapter_unmap (demux->adapter, 0);
+ data = NULL;
GST_DEBUG_OBJECT (demux, "Peeking found [%" GST_FOURCC_FORMAT "] "
"size: %" G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc), size);
if (size == 0) {
/* there may be multiple mdat (or alike) buffers */
/* sanity check */
if (demux->mdatbuffer)
- bs = GST_BUFFER_SIZE (demux->mdatbuffer);
+ bs = gst_buffer_get_size (demux->mdatbuffer);
else
bs = 0;
if (size + bs > 10 * (1 << 20))
GST_DEBUG_OBJECT (demux, "In header");
- data = gst_adapter_peek (demux->adapter, demux->neededbytes);
+ data = gst_adapter_map (demux->adapter, demux->neededbytes);
/* parse the header */
extract_initial_length_and_fourcc (data, demux->neededbytes, NULL,
demux->got_moov = TRUE;
/* prepare newsegment to send when streaming actually starts */
- if (!demux->pending_newsegment) {
- demux->pending_newsegment =
- gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
- 0, GST_CLOCK_TIME_NONE, 0);
- }
+ if (!demux->pending_newsegment)
+ demux->pending_newsegment = gst_event_new_segment (&demux->segment);
qtdemux_parse_moov (demux, data, demux->neededbytes);
qtdemux_node_dump (demux, demux->moov_node);
GST_DEBUG_OBJECT (demux, "Parsing [moof]");
if (!qtdemux_parse_moof (demux, data, demux->neededbytes,
demux->offset, NULL)) {
+ gst_adapter_unmap (demux->adapter, 0);
ret = GST_FLOW_ERROR;
goto done;
}
GST_FOURCC_ARGS (fourcc));
/* Let's jump that one and go back to initial state */
}
+ gst_adapter_unmap (demux->adapter, 0);
+ data = NULL;
if (demux->mdatbuffer && demux->n_streams) {
/* the mdat was before the header */
}
case QTDEMUX_STATE_BUFFER_MDAT:{
GstBuffer *buf;
+ guint8 fourcc[4];
GST_DEBUG_OBJECT (demux, "Got our buffer at offset %" G_GUINT64_FORMAT,
demux->offset);
buf = gst_adapter_take_buffer (demux->adapter, demux->neededbytes);
+ gst_buffer_extract (buf, 0, fourcc, 4);
GST_DEBUG_OBJECT (demux, "mdatbuffer starts with %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (QT_FOURCC (GST_BUFFER_DATA (buf) + 4)));
+ GST_FOURCC_ARGS (QT_FOURCC (fourcc)));
if (demux->mdatbuffer)
demux->mdatbuffer = gst_buffer_join (demux->mdatbuffer, buf);
else
static gboolean
qtdemux_sink_activate (GstPad * sinkpad)
{
- if (gst_pad_check_pull_range (sinkpad))
- return gst_pad_activate_pull (sinkpad, TRUE);
- else
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ gst_query_parse_scheduling (query, &pull_mode, NULL, NULL, NULL, NULL, NULL);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_pull (sinkpad, TRUE);
+
+activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
return gst_pad_activate_push (sinkpad, TRUE);
+ }
}
static gboolean
if (active) {
demux->pullbased = TRUE;
- demux->segment_running = TRUE;
return gst_pad_start_task (sinkpad, (GstTaskFunction) gst_qtdemux_loop,
sinkpad);
} else {
- demux->segment_running = FALSE;
return gst_pad_stop_task (sinkpad);
}
}
switch (type) {
case FOURCC_tCtH:
buffer = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buffer), buf, size);
+ _gst_buffer_copy_into_mem (buffer, buf, 0, size);
stream->buffers = g_slist_append (stream->buffers, buffer);
GST_LOG_OBJECT (qtdemux, "parsing theora header");
break;
case FOURCC_tCt_:
buffer = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buffer), buf, size);
+ _gst_buffer_copy_into_mem (buffer, buf, 0, size);
stream->buffers = g_slist_append (stream->buffers, buffer);
GST_LOG_OBJECT (qtdemux, "parsing theora comment");
break;
case FOURCC_tCtC:
buffer = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buffer), buf, size);
+ _gst_buffer_copy_into_mem (buffer, buf, 0, size);
stream->buffers = g_slist_append (stream->buffers, buffer);
GST_LOG_OBJECT (qtdemux, "parsing theora codebook");
break;
{
/* consistent default for push based mode */
gst_segment_init (&stream->segment, GST_FORMAT_TIME);
- gst_segment_set_newsegment (&stream->segment, FALSE, 1.0, GST_FORMAT_TIME,
- 0, GST_CLOCK_TIME_NONE, 0);
if (stream->subtype == FOURCC_vide) {
gchar *name = g_strdup_printf ("video_%02d", qtdemux->n_video_streams);
/* make sure it's not writable. We leave MALLOCDATA to NULL so that we
* don't free any of the buffer data. */
- palette = gst_buffer_new ();
- GST_BUFFER_FLAG_SET (palette, GST_BUFFER_FLAG_READONLY);
- GST_BUFFER_DATA (palette) = (guint8 *) palette_data;
- GST_BUFFER_SIZE (palette) = sizeof (guint32) * palette_count;
+ palette = _gst_buffer_new_wrapped ((gpointer) palette_data,
+ palette_count, NULL);
gst_caps_set_simple (stream->caps, "palette_data",
GST_TYPE_BUFFER, palette, NULL);
if (stream->pending_tags)
gst_tag_list_free (stream->pending_tags);
stream->pending_tags = list;
- if (list) {
- /* post now, send event on pad later */
- GST_DEBUG_OBJECT (qtdemux, "Posting tags %" GST_PTR_FORMAT, list);
- gst_element_post_message (GST_ELEMENT (qtdemux),
- gst_message_new_tag_full (GST_OBJECT (qtdemux), stream->pad,
- gst_tag_list_copy (list)));
- }
/* global tags go on each pad anyway */
stream->send_global_tags = TRUE;
}
G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc), *offset);
while (TRUE) {
+ guint8 *bdata;
+ gsize bsize;
+
ret = gst_pad_pull_range (qtdemux->sinkpad, *offset, 16, &buf);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto locate_failed;
- if (G_LIKELY (GST_BUFFER_SIZE (buf) != 16)) {
+ if (G_UNLIKELY (gst_buffer_get_size (buf) != 16)) {
/* likely EOF */
ret = GST_FLOW_UNEXPECTED;
gst_buffer_unref (buf);
goto locate_failed;
}
- extract_initial_length_and_fourcc (GST_BUFFER_DATA (buf), 16, length,
- &lfourcc);
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+ extract_initial_length_and_fourcc (bdata, 16, length, &lfourcc);
+ gst_buffer_unmap (buf, bdata, bsize);
gst_buffer_unref (buf);
if (G_UNLIKELY (*length == 0)) {
guint64 length, offset;
GstBuffer *buf = NULL;
GstFlowReturn ret = GST_FLOW_OK;
- GstFlowReturn res = TRUE;
+ GstFlowReturn res = GST_FLOW_OK;
+ guint8 *bdata;
+ gsize bsize;
offset = qtdemux->moof_offset;
GST_DEBUG_OBJECT (qtdemux, "next moof at offset %" G_GUINT64_FORMAT, offset);
ret = gst_qtdemux_pull_atom (qtdemux, offset, length, &buf);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto flow_failed;
- if (!qtdemux_parse_moof (qtdemux, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf), offset, NULL)) {
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+ if (!qtdemux_parse_moof (qtdemux, bdata, bsize, offset, NULL)) {
+ gst_buffer_unmap (buf, bdata, bsize);
gst_buffer_unref (buf);
buf = NULL;
goto parse_failed;
}
+ gst_buffer_unmap (buf, bdata, bsize);
gst_buffer_unref (buf);
buf = NULL;
/* sync sample atom */
stream->stps_present = FALSE;
if ((stream->stss_present =
- ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stss,
+ !!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stss,
&stream->stss) ? TRUE : FALSE) == TRUE) {
/* copy atom data into a new buffer for later use */
stream->stss.data = g_memdup (stream->stss.data, stream->stss.size);
/* partial sync sample atom */
if ((stream->stps_present =
- ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stps,
+ !!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stps,
&stream->stps) ? TRUE : FALSE) == TRUE) {
/* copy atom data into a new buffer for later use */
stream->stps.data = g_memdup (stream->stps.data, stream->stps.size);
/* composition time-to-sample */
if ((stream->ctts_present =
- ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_ctts,
+ !!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_ctts,
&stream->ctts) ? TRUE : FALSE) == TRUE) {
/* copy atom data into a new buffer for later use */
stream->ctts.data = g_memdup (stream->ctts.data, stream->ctts.size);
seqh_size = QT_UINT32 (data + 4);
if (seqh_size > 0) {
_seqh = gst_buffer_new_and_alloc (seqh_size);
- memcpy (GST_BUFFER_DATA (_seqh), data + 8, seqh_size);
+ _gst_buffer_copy_into_mem (_seqh, data + 8, 0, seqh_size);
}
}
}
static const guint wb_bitrates[] = {
6600, 8850, 12650, 14250, 15850, 18250, 19850, 23050, 23850
};
- const guint8 *data = GST_BUFFER_DATA (buf);
- guint size = QT_UINT32 (data), max_mode;
+ guint8 *data;
+ gsize size, max_mode;
guint16 mode_set;
- if (GST_BUFFER_SIZE (buf) != 0x11) {
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+
+ if (size != 0x11) {
GST_DEBUG ("Atom should have size 0x11, not %u", size);
goto bad_data;
}
goto bad_data;
}
+ gst_buffer_unmap (buf, data, size);
return wb ? wb_bitrates[max_mode] : nb_bitrates[max_mode];
bad_data:
+ gst_buffer_unmap (buf, data, size);
return 0;
}
avc_data + 8 + 1, size - 1);
buf = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (buf), avc_data + 0x8, size);
+ _gst_buffer_copy_into_mem (buf, avc_data + 0x8, 0, size);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
if (len > 0x8) {
len -= 0x8;
buf = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buf), data + 8, len);
+ _gst_buffer_copy_into_mem (buf, data + 8, 0, len);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
if (len > 0x8) {
len -= 0x8;
buf = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buf), data + 8, len);
+ _gst_buffer_copy_into_mem (buf, data + 8, 0, len);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
GST_DEBUG_OBJECT (qtdemux, "found codec_data in stsd");
buf = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buf), stsd_data, len);
+ _gst_buffer_copy_into_mem (buf, stsd_data, 0, len);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
case FOURCC_ovc1:
{
GNode *ovc1;
- gchar *ovc1_data;
+ guint8 *ovc1_data;
guint ovc1_len;
GstBuffer *buf;
break;
}
buf = gst_buffer_new_and_alloc (ovc1_len - 198);
- memcpy (GST_BUFFER_DATA (buf), ovc1_data + 198, ovc1_len - 198);
+ _gst_buffer_copy_into_mem (buf, ovc1_data + 198, 0, ovc1_len - 198);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
case FOURCC_owma:
{
GNode *owma;
- const gchar *owma_data, *codec_name = NULL;
+ const guint8 *owma_data;
+ const gchar *codec_name = NULL;
guint owma_len;
GstBuffer *buf;
gint version = 1;
}
wfex = (WAVEFORMATEX *) (owma_data + 36);
buf = gst_buffer_new_and_alloc (owma_len - 54);
- memcpy (GST_BUFFER_DATA (buf), owma_data + 54, owma_len - 54);
+ _gst_buffer_copy_into_mem (buf, owma_data + 54, 0, owma_len - 54);
if (wfex->wFormatTag == 0x0161) {
codec_name = "Windows Media Audio";
version = 2;
waveheader += 8;
headerlen -= 8;
- headerbuf = gst_buffer_new ();
- GST_BUFFER_DATA (headerbuf) = (guint8 *) waveheader;
- GST_BUFFER_SIZE (headerbuf) = headerlen;
+ headerbuf = gst_buffer_new_and_alloc (headerlen);
+ _gst_buffer_copy_into_mem (headerbuf, waveheader, 0, headerlen);
if (gst_riff_parse_strf_auds (GST_ELEMENT_CAST (qtdemux),
headerbuf, &header, &extra)) {
if (len > 0x4C) {
GstBuffer *buf = gst_buffer_new_and_alloc (len - 0x4C);
- memcpy (GST_BUFFER_DATA (buf), stsd_data + 0x4C, len - 0x4C);
+ _gst_buffer_copy_into_mem (buf, stsd_data + 0x4C, 0, len - 0x4C);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
/* codec-data contains alac atom size and prefix,
* ffmpeg likes it that way, not quite gst-ish though ...*/
buf = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buf), alac->data, len);
+ _gst_buffer_copy_into_mem (buf, alac->data, 0, len);
gst_caps_set_simple (stream->caps,
"codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf);
GstBuffer *buf = gst_buffer_new_and_alloc (len - 0x34);
guint bitrate;
- memcpy (GST_BUFFER_DATA (buf), stsd_data + 0x34, len - 0x34);
+ _gst_buffer_copy_into_mem (buf, stsd_data + 0x34, 0, len - 0x34);
/* If we have enough data, let's try to get the 'damr' atom. See
* the 3GPP container spec (26.244) for more details. */
return ((qtdemux->major_brand & GST_MAKE_FOURCC (255, 255, 0, 0)) ==
GST_MAKE_FOURCC ('3', 'g', 0, 0));
} else if (qtdemux->comp_brands != NULL) {
- guint8 *data = GST_BUFFER_DATA (qtdemux->comp_brands);
- guint size = GST_BUFFER_SIZE (qtdemux->comp_brands);
+ guint8 *data;
+ gsize size;
gboolean res = FALSE;
+ data = gst_buffer_map (qtdemux->comp_brands, &size, NULL, GST_MAP_READ);
while (size >= 4) {
res = res || ((QT_FOURCC (data) & GST_MAKE_FOURCC (255, 255, 0, 0)) ==
GST_MAKE_FOURCC ('3', 'g', 0, 0));
data += 4;
size -= 4;
}
+ gst_buffer_unmap (qtdemux->comp_brands, data, size);
return res;
} else {
return FALSE;
data = node->data;
len = QT_UINT32 (data);
buf = gst_buffer_new_and_alloc (len);
- memcpy (GST_BUFFER_DATA (buf), data, len);
+ _gst_buffer_copy_into_mem (buf, data, 0, len);
/* heuristic to determine style of tag */
if (QT_FOURCC (data + 4) == FOURCC_____ ||
GST_DEBUG_OBJECT (demux, "media type %s", media_type);
caps = gst_caps_new_simple (media_type, "style", G_TYPE_STRING, style, NULL);
- gst_buffer_set_caps (buf, caps);
+ // TODO convert to metadata or ???
+// gst_buffer_set_caps (buf, caps);
gst_caps_unref (caps);
g_free (media_type);
GST_DEBUG_OBJECT (demux, "adding private tag; size %d, caps %" GST_PTR_FORMAT,
- GST_BUFFER_SIZE (buf), caps);
+ len, caps);
gst_tag_list_add (demux->tag_list, GST_TAG_MERGE_APPEND,
GST_QT_DEMUX_PRIVATE_TAG, buf, NULL);
GstBuffer *buf;
GstTagList *taglist;
- buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = ((guint8 *) xmp_->data) + 8;
- GST_BUFFER_SIZE (buf) = QT_UINT32 ((guint8 *) xmp_->data) - 8;
-
+ buf = _gst_buffer_new_wrapped (((guint8 *) xmp_->data) + 8,
+ QT_UINT32 ((guint8 *) xmp_->data) - 8, NULL);
taglist = gst_tag_list_from_xmp_buffer (buf);
gst_buffer_unref (buf);
/* set duration in the segment info */
gst_qtdemux_get_duration (qtdemux, &duration);
if (duration)
- gst_segment_set_duration (&qtdemux->segment, GST_FORMAT_TIME, duration);
+ qtdemux->segment.duration = duration;
/* parse all traks */
trak = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_trak);
GstBuffer *buffer;
buffer = gst_buffer_new_and_alloc (data_len);
- memcpy (GST_BUFFER_DATA (buffer), data_ptr, data_len);
+ _gst_buffer_copy_into_mem (buffer, data_ptr, 0, data_len);
GST_DEBUG_OBJECT (qtdemux, "setting codec_data from esds");
GST_MEMDUMP_OBJECT (qtdemux, "codec_data from esds", data_ptr, data_len);
/* configured playback region */
GstSegment segment;
- gboolean segment_running;
GstEvent *pending_newsegment;
/* gst index support */
PROP_PEAK_FALLOFF
};
-GST_BOILERPLATE (GstLevel, gst_level, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
+#define gst_level_parent_class parent_class
+G_DEFINE_TYPE (GstLevel, gst_level, GST_TYPE_BASE_TRANSFORM);
static void gst_level_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void
-gst_level_base_init (gpointer g_class)
-{
- GstElementClass *element_class = g_class;
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_template_factory));
- gst_element_class_set_details_simple (element_class, "Level",
- "Filter/Analyzer/Audio",
- "RMS/Peak/Decaying Peak Level messager for audio/raw",
- "Thomas Vander Stichele <thomas at apestaart dot org>");
-}
-
-static void
gst_level_class_init (GstLevelClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
gobject_class->set_property = gst_level_set_property;
GST_DEBUG_CATEGORY_INIT (level_debug, "level", 0, "Level calculation");
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template_factory));
+ gst_element_class_set_details_simple (element_class, "Level",
+ "Filter/Analyzer/Audio",
+ "RMS/Peak/Decaying Peak Level messager for audio/raw",
+ "Thomas Vander Stichele <thomas at apestaart dot org>");
+
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_level_set_caps);
trans_class->start = GST_DEBUG_FUNCPTR (gst_level_start);
trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_level_transform_ip);
}
static void
-gst_level_init (GstLevel * filter, GstLevelClass * g_class)
+gst_level_init (GstLevel * filter)
{
filter->CS = NULL;
filter->peak = NULL;
gst_level_transform_ip (GstBaseTransform * trans, GstBuffer * in)
{
GstLevel *filter;
- guint8 *in_data;
+ guint8 *in_data, *data;
+ gsize in_size;
gdouble CS;
guint i;
guint num_frames = 0;
filter = GST_LEVEL (trans);
- in_data = GST_BUFFER_DATA (in);
- num_int_samples = GST_BUFFER_SIZE (in) / (filter->width / 8);
+ in_data = data = gst_buffer_map (in, &in_size, NULL, GST_MAP_READ);
+ num_int_samples = in_size / (filter->width / 8);
GST_LOG_OBJECT (filter, "analyzing %u sample frames at ts %" GST_TIME_FORMAT,
num_int_samples, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (in)));
filter->num_frames = 0;
}
+ gst_buffer_unmap (in, data, in_size);
+
return GST_FLOW_OK;
}
entry->pos + demux->common.ebml_segment_start);
}
- flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
- keyunit = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
+ flush = !!(flags & GST_SEEK_FLAG_FLUSH);
+ keyunit = !!(flags & GST_SEEK_FLAG_KEY_UNIT);
if (flush) {
GST_DEBUG_OBJECT (demux, "Starting flush");
return multi_file_sync_next_type;
}
-GST_BOILERPLATE (GstMultiFileSink, gst_multi_file_sink, GstBaseSink,
- GST_TYPE_BASE_SINK);
-
-static void
-gst_multi_file_sink_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- GST_DEBUG_CATEGORY_INIT (gst_multi_file_sink_debug, "multifilesink", 0,
- "multifilesink element");
-
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&sinktemplate));
- gst_element_class_set_details_simple (gstelement_class, "Multi-File Sink",
- "Sink/File",
- "Write buffers to a sequentially named set of files",
- "David Schleef <ds@schleef.org>");
-}
+#define gst_multi_file_sink_parent_class parent_class
+G_DEFINE_TYPE (GstMultiFileSink, gst_multi_file_sink, GST_TYPE_BASE_SINK);
static void
gst_multi_file_sink_class_init (GstMultiFileSinkClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstBaseSinkClass *gstbasesink_class = GST_BASE_SINK_CLASS (klass);
gobject_class->set_property = gst_multi_file_sink_set_property;
gstbasesink_class->render = GST_DEBUG_FUNCPTR (gst_multi_file_sink_render);
gstbasesink_class->set_caps =
GST_DEBUG_FUNCPTR (gst_multi_file_sink_set_caps);
+
+ GST_DEBUG_CATEGORY_INIT (gst_multi_file_sink_debug, "multifilesink", 0,
+ "multifilesink element");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sinktemplate));
+ gst_element_class_set_details_simple (gstelement_class, "Multi-File Sink",
+ "Sink/File",
+ "Write buffers to a sequentially named set of files",
+ "David Schleef <ds@schleef.org>");
}
static void
-gst_multi_file_sink_init (GstMultiFileSink * multifilesink,
- GstMultiFileSinkClass * g_class)
+gst_multi_file_sink_init (GstMultiFileSink * multifilesink)
{
multifilesink->filename = g_strdup (DEFAULT_LOCATION);
multifilesink->index = DEFAULT_INDEX;
gst_multi_file_sink_render (GstBaseSink * sink, GstBuffer * buffer)
{
GstMultiFileSink *multifilesink;
- guint size;
+ gsize size;
guint8 *data;
gchar *filename;
gboolean ret;
GError *error = NULL;
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
multifilesink = GST_MULTI_FILE_SINK (sink);
goto stdio_write_error;
}
- ret = fwrite (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 1,
- multifilesink->file);
+ ret = fwrite (data, size, 1, multifilesink->file);
if (ret != 1)
goto stdio_write_error;
if (multifilesink->streamheaders) {
for (i = 0; i < multifilesink->n_streamheaders; i++) {
- ret = fwrite (GST_BUFFER_DATA (multifilesink->streamheaders[i]),
- GST_BUFFER_SIZE (multifilesink->streamheaders[i]), 1,
- multifilesink->file);
+ guint8 *sdata;
+ gsize ssize;
+
+ sdata = gst_buffer_map (multifilesink->streamheaders[i], &ssize,
+ NULL, GST_MAP_READ);
+ ret = fwrite (sdata, ssize, 1, multifilesink->file);
+ gst_buffer_unmap (multifilesink->streamheaders[i], sdata, ssize);
if (ret != 1)
goto stdio_write_error;
}
}
}
- ret = fwrite (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 1,
- multifilesink->file);
+ ret = fwrite (data, size, 1, multifilesink->file);
if (ret != 1)
goto stdio_write_error;
g_assert_not_reached ();
}
+ gst_buffer_unmap (buffer, data, size);
return GST_FLOW_OK;
/* ERRORS */
g_error_free (error);
g_free (filename);
+ gst_buffer_unmap (buffer, data, size);
return GST_FLOW_ERROR;
}
stdio_write_error:
- GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
- ("Error while writing to file."), (NULL));
- return GST_FLOW_ERROR;
+ {
+ GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
+ ("Error while writing to file."), (NULL));
+ gst_buffer_unmap (buffer, data, size);
+ return GST_FLOW_ERROR;
+ }
}
static gboolean
const GValue * value, GParamSpec * pspec);
static void gst_multi_file_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstCaps *gst_multi_file_src_getcaps (GstBaseSrc * src);
+static GstCaps *gst_multi_file_src_getcaps (GstBaseSrc * src, GstCaps * filter);
static gboolean gst_multi_file_src_query (GstBaseSrc * src, GstQuery * query);
#define DEFAULT_LOCATION "%05d"
#define DEFAULT_INDEX 0
+#define gst_multi_file_src_parent_class parent_class
+G_DEFINE_TYPE (GstMultiFileSrc, gst_multi_file_src, GST_TYPE_PUSH_SRC);
-GST_BOILERPLATE (GstMultiFileSrc, gst_multi_file_src, GstPushSrc,
- GST_TYPE_PUSH_SRC);
-
-static void
-gst_multi_file_src_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- GST_DEBUG_CATEGORY_INIT (gst_multi_file_src_debug, "multifilesrc", 0,
- "multifilesrc element");
-
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_multi_file_src_pad_template));
- gst_element_class_set_details_simple (gstelement_class, "Multi-File Source",
- "Source/File",
- "Read a sequentially named set of files into buffers",
- "David Schleef <ds@schleef.org>");
-}
static void
gst_multi_file_src_class_init (GstMultiFileSrcClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
GST_LOG ("No large file support, sizeof (off_t) = %" G_GSIZE_FORMAT,
sizeof (off_t));
}
+
+ GST_DEBUG_CATEGORY_INIT (gst_multi_file_src_debug, "multifilesrc", 0,
+ "multifilesrc element");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_multi_file_src_pad_template));
+ gst_element_class_set_details_simple (gstelement_class, "Multi-File Source",
+ "Source/File",
+ "Read a sequentially named set of files into buffers",
+ "David Schleef <ds@schleef.org>");
}
static void
-gst_multi_file_src_init (GstMultiFileSrc * multifilesrc,
- GstMultiFileSrcClass * g_class)
+gst_multi_file_src_init (GstMultiFileSrc * multifilesrc)
{
multifilesrc->index = DEFAULT_INDEX;
multifilesrc->filename = g_strdup (DEFAULT_LOCATION);
}
static GstCaps *
-gst_multi_file_src_getcaps (GstBaseSrc * src)
+gst_multi_file_src_getcaps (GstBaseSrc * src, GstCaps * filter)
{
GstMultiFileSrc *multi_file_src = GST_MULTI_FILE_SRC (src);
GST_DEBUG_OBJECT (src, "returning %" GST_PTR_FORMAT, multi_file_src->caps);
if (multi_file_src->caps) {
- return gst_caps_ref (multi_file_src->caps);
+ if (filter)
+ return gst_caps_intersect_full (filter, multi_file_src->caps,
+ GST_CAPS_INTERSECT_FIRST);
+ else
+ return gst_caps_ref (multi_file_src->caps);
} else {
- return gst_caps_new_any ();
+ if (filter)
+ return gst_caps_ref (filter);
+ else
+ return gst_caps_new_any ();
}
}
multifilesrc->index++;
buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = (unsigned char *) data;
- GST_BUFFER_MALLOCDATA (buf) = GST_BUFFER_DATA (buf);
- GST_BUFFER_SIZE (buf) = size;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
GST_BUFFER_OFFSET (buf) = multifilesrc->offset;
GST_BUFFER_OFFSET_END (buf) = multifilesrc->offset + size;
multifilesrc->offset += size;
- gst_buffer_set_caps (buf, multifilesrc->caps);
GST_DEBUG_OBJECT (multifilesrc, "read file \"%s\".", filename);
libgstreplaygain_la_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstreplaygain_la_LIBADD = \
- $(GST_PLUGINS_BASE_LIBS) -lgstpbutils-0.10 \
+ $(GST_PLUGINS_BASE_LIBS) -lgstpbutils-$(GST_MAJORMINOR) \
$(GST_BASE_LIBS) $(GST_LIBS) $(LIBM)
libgstreplaygain_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstreplaygain_la_LIBTOOLFLAGS = --tag=disable-static
"signed = (boolean) true, " "endianness = (int) BYTE_ORDER, "
REPLAY_GAIN_CAPS));
-GST_BOILERPLATE (GstRgAnalysis, gst_rg_analysis, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
+#define gst_rg_analysis_parent_class parent_class
+G_DEFINE_TYPE (GstRgAnalysis, gst_rg_analysis, GST_TYPE_BASE_TRANSFORM);
static void gst_rg_analysis_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GstTagList ** tag_list);
static void
-gst_rg_analysis_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_factory));
- gst_element_class_set_details_simple (element_class, "ReplayGain analysis",
- "Filter/Analyzer/Audio",
- "Perform the ReplayGain analysis",
- "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
-
- GST_DEBUG_CATEGORY_INIT (gst_rg_analysis_debug, "rganalysis", 0,
- "ReplayGain analysis element");
-}
-
-static void
gst_rg_analysis_class_init (GstRgAnalysisClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_rg_analysis_set_property;
gobject_class->get_property = gst_rg_analysis_get_property;
trans_class->event = GST_DEBUG_FUNCPTR (gst_rg_analysis_event);
trans_class->stop = GST_DEBUG_FUNCPTR (gst_rg_analysis_stop);
trans_class->passthrough_on_same_caps = TRUE;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
+ gst_element_class_set_details_simple (element_class, "ReplayGain analysis",
+ "Filter/Analyzer/Audio",
+ "Perform the ReplayGain analysis",
+ "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
+
+ GST_DEBUG_CATEGORY_INIT (gst_rg_analysis_debug, "rganalysis", 0,
+ "ReplayGain analysis element");
}
static void
-gst_rg_analysis_init (GstRgAnalysis * filter, GstRgAnalysisClass * gclass)
+gst_rg_analysis_init (GstRgAnalysis * filter)
{
GstBaseTransform *base = GST_BASE_TRANSFORM (filter);
gst_rg_analysis_transform_ip (GstBaseTransform * base, GstBuffer * buf)
{
GstRgAnalysis *filter = GST_RG_ANALYSIS (base);
+ guint8 *data;
+ gsize size;
g_return_val_if_fail (filter->ctx != NULL, GST_FLOW_WRONG_STATE);
g_return_val_if_fail (filter->analyze != NULL, GST_FLOW_NOT_NEGOTIATED);
if (filter->skip)
return GST_FLOW_OK;
- GST_LOG_OBJECT (filter, "processing buffer of size %u",
- GST_BUFFER_SIZE (buf));
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ GST_LOG_OBJECT (filter, "processing buffer of size %" G_GSIZE_FORMAT, size);
rg_analysis_start_buffer (filter->ctx, GST_BUFFER_TIMESTAMP (buf));
- filter->analyze (filter->ctx, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf),
- filter->depth);
+ filter->analyze (filter->ctx, data, size, filter->depth);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
"width = (int) 32, channels = (int) [1, MAX], "
"rate = (int) [1, MAX], endianness = (int) BYTE_ORDER"));
-GST_BOILERPLATE (GstRgLimiter, gst_rg_limiter, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM);
+#define gst_rg_limiter_parent_class parent_class
+G_DEFINE_TYPE (GstRgLimiter, gst_rg_limiter, GST_TYPE_BASE_TRANSFORM);
static void gst_rg_limiter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GstBuffer * buf);
static void
-gst_rg_limiter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = g_class;
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_factory));
- gst_element_class_set_details_simple (element_class, "ReplayGain limiter",
- "Filter/Effect/Audio",
- "Apply signal compression to raw audio data",
- "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
-
- GST_DEBUG_CATEGORY_INIT (gst_rg_limiter_debug, "rglimiter", 0,
- "ReplayGain limiter element");
-}
-
-static void
gst_rg_limiter_class_init (GstRgLimiterClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
gobject_class->set_property = gst_rg_limiter_set_property;
gobject_class->get_property = gst_rg_limiter_get_property;
trans_class = GST_BASE_TRANSFORM_CLASS (klass);
trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_rg_limiter_transform_ip);
trans_class->passthrough_on_same_caps = FALSE;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
+ gst_element_class_set_details_simple (element_class, "ReplayGain limiter",
+ "Filter/Effect/Audio",
+ "Apply signal compression to raw audio data",
+ "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
+
+ GST_DEBUG_CATEGORY_INIT (gst_rg_limiter_debug, "rglimiter", 0,
+ "ReplayGain limiter element");
}
static void
-gst_rg_limiter_init (GstRgLimiter * filter, GstRgLimiterClass * gclass)
+gst_rg_limiter_init (GstRgLimiter * filter)
{
GstBaseTransform *base = GST_BASE_TRANSFORM (filter);
{
GstRgLimiter *filter = GST_RG_LIMITER (base);
gfloat *input;
+ guint8 *data;
guint count;
guint i;
if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP))
return GST_FLOW_OK;
- input = (gfloat *) GST_BUFFER_DATA (buf);
- count = GST_BUFFER_SIZE (buf) / sizeof (gfloat);
+ data = gst_buffer_map (buf, NULL, NULL, GST_MAP_READ);
+ input = (gfloat *) data;
+ count = gst_buffer_get_size (buf) / sizeof (gfloat);
for (i = count; i--;) {
if (*input > THRES)
input++;
}
+ gst_buffer_unmap (buf, data, -1);
+
return GST_FLOW_OK;
}
"endianness = (int) BYTE_ORDER, "
"width = (int) 16, " "depth = (int) 16, " "signed = (bool) TRUE"));
-GST_BOILERPLATE (GstRgVolume, gst_rg_volume, GstBin, GST_TYPE_BIN);
+#define gst_rg_volume_parent_class parent_class
+G_DEFINE_TYPE (GstRgVolume, gst_rg_volume, GST_TYPE_BIN);
static void gst_rg_volume_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
gdouble * target_gain, gdouble * result_gain);
static void
-gst_rg_volume_base_init (gpointer g_class)
-{
- GstElementClass *element_class = g_class;
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
- gst_element_class_set_details_simple (element_class, "ReplayGain volume",
- "Filter/Effect/Audio",
- "Apply ReplayGain volume adjustment",
- "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
-
- GST_DEBUG_CATEGORY_INIT (gst_rg_volume_debug, "rgvolume", 0,
- "ReplayGain volume element");
-}
-
-static void
gst_rg_volume_class_init (GstRgVolumeClass * klass)
{
GObjectClass *gobject_class;
* mess with our internals. */
bin_class->add_element = NULL;
bin_class->remove_element = NULL;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_set_details_simple (element_class, "ReplayGain volume",
+ "Filter/Effect/Audio",
+ "Apply ReplayGain volume adjustment",
+ "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
+
+ GST_DEBUG_CATEGORY_INIT (gst_rg_volume_debug, "rgvolume", 0,
+ "ReplayGain volume element");
}
static void
-gst_rg_volume_init (GstRgVolume * self, GstRgVolumeClass * gclass)
+gst_rg_volume_init (GstRgVolume * self)
{
GObjectClass *volume_class;
GstPad *volume_pad, *ghost_pad;
fnv1hash.c \
gstrtp.c \
gstrtpchannels.c \
- gstrtpdepay.c \
gstrtpac3depay.c \
gstrtpac3pay.c \
gstrtpbvdepay.c \
gstrtpmp4gpay.h \
gstrtpmp4adepay.h \
gstrtpmp4apay.h \
- gstrtpdepay.h \
gstasteriskh263.h \
gstrtpqcelpdepay.h \
gstrtpqdmdepay.h \
#include "config.h"
#endif
-#include "gstrtpdepay.h"
#include "gstrtpac3depay.h"
#include "gstrtpac3pay.h"
#include "gstrtpbvdepay.h"
static gboolean
plugin_init (GstPlugin * plugin)
{
- if (!gst_rtp_depay_plugin_init (plugin))
- return FALSE;
-
if (!gst_rtp_ac3_depay_plugin_init (plugin))
return FALSE;
"encoding-name = (string) \"AC3\"")
);
-GST_BOILERPLATE (GstRtpAC3Depay, gst_rtp_ac3_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+G_DEFINE_TYPE (GstRtpAC3Depay, gst_rtp_ac3_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_ac3_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
GstBuffer * buf);
static void
-gst_rtp_ac3_depay_base_init (gpointer klass)
+gst_rtp_ac3_depay_class_init (GstRtpAC3DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_ac3_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_ac3_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP AC3 depayloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP AC3 depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts AC3 audio from RTP packets (RFC 4184)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_ac3_depay_class_init (GstRtpAC3DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->set_caps = gst_rtp_ac3_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_ac3_depay_process;
}
static void
-gst_rtp_ac3_depay_init (GstRtpAC3Depay * rtpac3depay,
- GstRtpAC3DepayClass * klass)
+gst_rtp_ac3_depay_init (GstRtpAC3Depay * rtpac3depay)
{
- /* needed because of GST_BOILERPLATE */
+ /* needed because of G_DEFINE_TYPE */
}
static gboolean
{
GstRtpAC3Depay *rtpac3depay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL, };
+ guint8 *payload;
+ guint16 FT, NF;
rtpac3depay = GST_RTP_AC3_DEPAY (depayload);
- {
- guint8 *payload;
- guint16 FT, NF;
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- if (gst_rtp_buffer_get_payload_len (buf) < 2)
- goto empty_packet;
+ if (gst_rtp_buffer_get_payload_len (&rtp) < 2)
+ goto empty_packet;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
- /* strip off header
- *
- * 0 1
- * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- * | MBZ | FT| NF |
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- FT = payload[0] & 0x3;
- NF = payload[1];
+ /* strip off header
+ *
+ * 0 1
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | MBZ | FT| NF |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ FT = payload[0] & 0x3;
+ NF = payload[1];
- GST_DEBUG_OBJECT (rtpac3depay, "FT: %d, NF: %d", FT, NF);
+ GST_DEBUG_OBJECT (rtpac3depay, "FT: %d, NF: %d", FT, NF);
- /* We don't bother with fragmented packets yet */
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 2, -1);
+ /* We don't bother with fragmented packets yet */
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 2, -1);
- if (outbuf)
- GST_DEBUG_OBJECT (rtpac3depay, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_rtp_buffer_unmap (&rtp);
- return outbuf;
- }
+ if (outbuf)
+ GST_DEBUG_OBJECT (rtpac3depay, "pushing buffer of size %d",
+ gst_buffer_get_size (outbuf));
- return NULL;
+ return outbuf;
/* ERRORS */
empty_packet:
{
GST_ELEMENT_WARNING (rtpac3depay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static gboolean gst_rtp_ac3_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-static gboolean gst_rtp_ac3_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_ac3_pay_handle_event (GstBaseRTPPayload * payload,
+ GstEvent * event);
static GstFlowReturn gst_rtp_ac3_pay_flush (GstRtpAC3Pay * rtpac3pay);
static GstFlowReturn gst_rtp_ac3_pay_handle_buffer (GstBaseRTPPayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpAC3Pay, gst_rtp_ac3_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_ac3_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_ac3_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_ac3_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP AC3 audio payloader", "Codec/Payloader/Network/RTP",
- "Payload AC3 audio as RTP packets (RFC 4184)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_ac3_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAC3Pay, gst_rtp_ac3_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_ac3_pay_class_init (GstRtpAC3PayClass * klass)
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpac3pay_debug, "rtpac3pay", 0,
+ "AC3 Audio RTP Depayloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstelement_class->change_state = gst_rtp_ac3_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ac3_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ac3_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP AC3 audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload AC3 audio as RTP packets (RFC 4184)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_ac3_pay_setcaps;
gstbasertppayload_class->handle_event = gst_rtp_ac3_pay_handle_event;
gstbasertppayload_class->handle_buffer = gst_rtp_ac3_pay_handle_buffer;
-
- GST_DEBUG_CATEGORY_INIT (rtpac3pay_debug, "rtpac3pay", 0,
- "AC3 Audio RTP Depayloader");
}
static void
-gst_rtp_ac3_pay_init (GstRtpAC3Pay * rtpac3pay, GstRtpAC3PayClass * klass)
+gst_rtp_ac3_pay_init (GstRtpAC3Pay * rtpac3pay)
{
rtpac3pay->adapter = gst_adapter_new ();
}
}
static gboolean
-gst_rtp_ac3_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_ac3_pay_handle_event (GstBaseRTPPayload * payload, GstEvent * event)
{
+ gboolean res;
GstRtpAC3Pay *rtpac3pay;
- rtpac3pay = GST_RTP_AC3_PAY (gst_pad_get_parent (pad));
+ rtpac3pay = GST_RTP_AC3_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
break;
}
- gst_object_unref (rtpac3pay);
+ res =
+ GST_BASE_RTP_PAYLOAD_CLASS (parent_class)->handle_event (payload, event);
- /* FALSE to let the parent handle the event as well */
- return FALSE;
+ return res;
}
struct frmsize_s
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL, };
/* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (2 + avail, 0, 0);
* 3: other fragment
* NF: amount of frames if FT = 0, else number of fragments.
*/
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
GST_LOG_OBJECT (rtpac3pay, "FT %u, NF %u", FT, NF);
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload[0] = (FT & 3);
payload[1] = NF;
payload_len -= 2;
avail -= payload_len;
if (avail == 0)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpac3pay->first_ts;
GST_BUFFER_DURATION (outbuf) = rtpac3pay->duration;
{
GstRtpAC3Pay *rtpac3pay;
GstFlowReturn ret;
- guint size, avail, left, NF;
+ gsize size, avail, left, NF;
guint8 *data, *p;
guint packet_len;
GstClockTime duration, timestamp;
rtpac3pay = GST_RTP_AC3_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
p += frame_size;
left -= frame_size;
}
+ gst_buffer_unmap (buffer, data, size);
if (NF == 0)
goto no_frames;
static GstBuffer *gst_rtp_amr_depay_process (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpAMRDepay, gst_rtp_amr_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_amr_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAMRDepay, gst_rtp_amr_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_amr_depay_base_init (gpointer klass)
+gst_rtp_amr_depay_class_init (GstRtpAMRDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_amr_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_amr_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP AMR depayloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP AMR depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts AMR or AMR-WB audio from RTP packets (RFC 3267)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_amr_depay_class_init (GstRtpAMRDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_amr_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_amr_depay_setcaps;
}
static void
-gst_rtp_amr_depay_init (GstRtpAMRDepay * rtpamrdepay,
- GstRtpAMRDepayClass * klass)
+gst_rtp_amr_depay_init (GstRtpAMRDepay * rtpamrdepay)
{
GstBaseRTPDepayload *depayload;
const gint *frame_size;
GstBuffer *outbuf = NULL;
gint payload_len;
+ GstRTPBuffer rtp = { NULL };
+ guint8 *odata;
+ gsize osize;
rtpamrdepay = GST_RTP_AMR_DEPAY (depayload);
else
frame_size = wb_frame_size;
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
/* when we get here, 1 channel, 8000/16000 Hz, octet aligned, no CRC,
* no robust sorting, no interleaving data is to be depayloaded */
{
gint amr_len;
gint ILL, ILP;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* need at least 2 bytes for the header */
if (payload_len < 2)
goto too_small;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* depay CMR. The CMR is used by the sender to request
* a new encoding mode.
outbuf = gst_buffer_new_and_alloc (payload_len);
/* point to destination */
- p = GST_BUFFER_DATA (outbuf);
+ odata = gst_buffer_map (outbuf, &osize, NULL, GST_MAP_WRITE);
+
/* point to first data packet */
+ p = odata;
dp = payload + num_packets;
if (rtpamrdepay->crc) {
/* skip CRC if present */
dp += fr_size;
}
}
+ gst_buffer_unmap (outbuf, odata, osize);
+
/* we can set the duration because each packet is 20 milliseconds */
GST_BUFFER_DURATION (outbuf) = num_packets * 20 * GST_MSECOND;
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
/* marker bit marks a discont buffer after a talkspurt. */
GST_DEBUG_OBJECT (depayload, "marker bit was set");
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
GST_DEBUG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_buffer_get_size (outbuf));
}
return outbuf;
static GstStateChangeReturn
gst_rtp_amr_pay_change_state (GstElement * element, GstStateChange transition);
-GST_BOILERPLATE (GstRtpAMRPay, gst_rtp_amr_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
+#define gst_rtp_amr_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAMRPay, gst_rtp_amr_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
-gst_rtp_amr_pay_base_init (gpointer klass)
+gst_rtp_amr_pay_class_init (GstRtpAMRPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gstelement_class->change_state = gst_rtp_amr_pay_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_amr_pay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_amr_pay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP AMR payloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP AMR payloader",
"Codec/Payloader/Network/RTP",
"Payload-encode AMR or AMR-WB audio into RTP packets (RFC 3267)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_amr_pay_class_init (GstRtpAMRPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
- GstElementClass *gstelement_class;
-
- gstelement_class = (GstElementClass *) klass;
- gstelement_class->change_state = gst_rtp_amr_pay_change_state;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_amr_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_amr_pay_handle_buffer;
}
static void
-gst_rtp_amr_pay_init (GstRtpAMRPay * rtpamrpay, GstRtpAMRPayClass * klass)
+gst_rtp_amr_pay_init (GstRtpAMRPay * rtpamrpay)
{
- /* needed because of GST_BOILERPLATE */
}
static void
GstRtpAMRPay *rtpamrpay;
const gint *frame_size;
GstFlowReturn ret;
- guint size, payload_len;
+ guint payload_len;
+ gsize size;
GstBuffer *outbuf;
- guint8 *payload, *data, *payload_amr;
+ guint8 *payload, *data, *ptr, *payload_amr;
GstClockTime timestamp, duration;
guint packet_len, mtu;
gint i, num_packets, num_nonempty_packets;
gint amr_len;
gboolean sid = FALSE;
+ GstRTPBuffer rtp = { NULL };
rtpamrpay = GST_RTP_AMR_PAY (basepayload);
mtu = GST_BASE_RTP_PAYLOAD_MTU (rtpamrpay);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
/* now alloc output buffer */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* copy timestamp */
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
if (GST_BUFFER_IS_DISCONT (buffer)) {
GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
gst_rtp_amr_pay_recalc_rtp_time (rtpamrpay, timestamp);
}
(num_packets * 160) << (rtpamrpay->mode == GST_RTP_AMR_P_MODE_WB);
/* get payload, this is now writable */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* 0 1 2 3 4 5 6 7
* +-+-+-+-+-+-+-+-+
/* copy data in payload, first we copy all the FTs then all
* the AMR data. The last FT has to have the F flag cleared. */
+ ptr = data;
for (i = 1; i <= num_packets; i++) {
guint8 FT;
gint fr_size;
* |F| FT |Q|P|P| more FT...
* +-+-+-+-+-+-+-+-+
*/
- FT = (*data & 0x78) >> 3;
+ FT = (*ptr & 0x78) >> 3;
fr_size = frame_size[FT];
if (i == num_packets)
/* last packet, clear F flag */
- payload[i] = *data & 0x7f;
+ payload[i] = *ptr & 0x7f;
else
/* set F flag */
- payload[i] = *data | 0x80;
+ payload[i] = *ptr | 0x80;
- memcpy (payload_amr, &data[1], fr_size);
+ memcpy (payload_amr, &ptr[1], fr_size);
/* all sizes are > 0 since we checked for that above */
- data += fr_size + 1;
+ ptr += fr_size + 1;
payload_amr += fr_size;
}
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
+ gst_rtp_buffer_unmap (&rtp);
+
ret = gst_basertppayload_push (basepayload, outbuf);
return ret;
{
GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
(NULL), ("received AMR frame with size <= 0"));
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
{
GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
(NULL), ("received incomplete AMR frames"));
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
{
GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
(NULL), ("received too many AMR frames for MTU"));
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
static gboolean gst_rtp_bv_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPBVDepay, gst_rtp_bv_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_bv_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPBVDepay, gst_rtp_bv_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_bv_depay_base_init (gpointer klass)
+gst_rtp_bv_depay_class_init (GstRTPBVDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_bv_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_bv_depay_sink_template));
- gst_element_class_set_details_simple (element_class,
+
+ gst_element_class_set_details_simple (gstelement_class,
"RTP BroadcomVoice depayloader", "Codec/Depayloader/Network/RTP",
"Extracts BroadcomVoice audio from RTP packets (RFC 4298)",
"Wim Taymans <wim.taymans@collabora.co.uk>");
-}
-
-static void
-gst_rtp_bv_depay_class_init (GstRTPBVDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_bv_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_bv_depay_setcaps;
}
static void
-gst_rtp_bv_depay_init (GstRTPBVDepay * rtpbvdepay, GstRTPBVDepayClass * klass)
+gst_rtp_bv_depay_init (GstRTPBVDepay * rtpbvdepay)
{
rtpbvdepay->mode = -1;
}
{
GstBuffer *outbuf;
gboolean marker;
+ GstRTPBuffer rtp = { NULL, };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark start of talkspurt with DISCONT */
static GstCaps *gst_rtp_bv_pay_sink_getcaps (GstBaseRTPPayload * payload,
- GstPad * pad);
+ GstPad * pad, GstCaps * filter);
static gboolean gst_rtp_bv_pay_sink_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPBVPay, gst_rtp_bv_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
+#define gst_rtp_bv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPBVPay, gst_rtp_bv_pay, GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_bv_pay_base_init (gpointer klass)
+gst_rtp_bv_pay_class_init (GstRTPBVPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpbvpay_debug, "rtpbvpay", 0,
+ "BroadcomVoice audio RTP payloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_bv_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_bv_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP BV Payloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP BV Payloader",
"Codec/Payloader/Network/RTP",
"Packetize BroadcomVoice audio streams into RTP packets (RFC 4298)",
"Wim Taymans <wim.taymans@collabora.co.uk>");
-}
-
-static void
-gst_rtp_bv_pay_class_init (GstRTPBVPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_bv_pay_sink_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_bv_pay_sink_getcaps;
-
- GST_DEBUG_CATEGORY_INIT (rtpbvpay_debug, "rtpbvpay", 0,
- "BroadcomVoice audio RTP payloader");
}
static void
-gst_rtp_bv_pay_init (GstRTPBVPay * rtpbvpay, GstRTPBVPayClass * klass)
+gst_rtp_bv_pay_init (GstRTPBVPay * rtpbvpay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
/* we return the padtemplate caps with the mode field fixated to a value if we
* can */
static GstCaps *
-gst_rtp_bv_pay_sink_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
+gst_rtp_bv_pay_sink_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
static gboolean gst_rtp_celt_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpCELTDepay, gst_rtp_celt_depay, GstBaseRTPDepayload,
+#define gst_rtp_celt_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpCELTDepay, gst_rtp_celt_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_celt_depay_base_init (gpointer klass)
+gst_rtp_celt_depay_class_init (GstRtpCELTDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_celt_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_celt_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP CELT depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts CELT audio from RTP packets",
- "Wim Taymans <wim.taymans@gmail.com>");
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
GST_DEBUG_CATEGORY_INIT (rtpceltdepay_debug, "rtpceltdepay", 0,
"CELT RTP Depayloader");
-}
-
-static void
-gst_rtp_celt_depay_class_init (GstRtpCELTDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP CELT depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts CELT audio from RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertpdepayload_class->process = gst_rtp_celt_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_celt_depay_setcaps;
}
static void
-gst_rtp_celt_depay_init (GstRtpCELTDepay * rtpceltdepay,
- GstRtpCELTDepayClass * klass)
+gst_rtp_celt_depay_init (GstRtpCELTDepay * rtpceltdepay)
{
}
GstRtpCELTDepay *rtpceltdepay;
gint clock_rate, nb_channels = 0, frame_size = 0;
GstBuffer *buf;
- guint8 *data;
+ guint8 *data, *ptr;
+ gsize size;
const gchar *params;
GstCaps *srccaps;
gboolean res;
/* construct minimal header and comment packet for the decoder */
buf = gst_buffer_new_and_alloc (60);
- data = GST_BUFFER_DATA (buf);
- memcpy (data, "CELT ", 8);
- data += 8;
- memcpy (data, "1.1.12", 7);
- data += 20;
- GST_WRITE_UINT32_LE (data, 0x80000006); /* version */
- data += 4;
- GST_WRITE_UINT32_LE (data, 56); /* header_size */
- data += 4;
- GST_WRITE_UINT32_LE (data, clock_rate); /* rate */
- data += 4;
- GST_WRITE_UINT32_LE (data, nb_channels); /* channels */
- data += 4;
- GST_WRITE_UINT32_LE (data, frame_size); /* frame-size */
- data += 4;
- GST_WRITE_UINT32_LE (data, -1); /* overlap */
- data += 4;
- GST_WRITE_UINT32_LE (data, -1); /* bytes_per_packet */
- data += 4;
- GST_WRITE_UINT32_LE (data, 0); /* extra headers */
+ ptr = data = gst_buffer_map (buf, &size, NULL, GST_MAP_WRITE);
+ memcpy (ptr, "CELT ", 8);
+ ptr += 8;
+ memcpy (ptr, "1.1.12", 7);
+ ptr += 20;
+ GST_WRITE_UINT32_LE (ptr, 0x80000006); /* version */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, 56); /* header_size */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, clock_rate); /* rate */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, nb_channels); /* channels */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, frame_size); /* frame-size */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, -1); /* overlap */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, -1); /* bytes_per_packet */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, 0); /* extra headers */
+ gst_buffer_unmap (buf, data, size);
srccaps = gst_caps_new_simple ("audio/x-celt", NULL);
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpceltdepay), buf);
buf = gst_buffer_new_and_alloc (sizeof (gst_rtp_celt_comment));
- memcpy (GST_BUFFER_DATA (buf), gst_rtp_celt_comment,
- sizeof (gst_rtp_celt_comment));
+ gst_buffer_fill (buf, 0, gst_rtp_celt_comment, sizeof (gst_rtp_celt_comment));
- gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpceltdepay), buf);
return res;
GstClockTime framesize_ns = 0, timestamp;
guint n = 0;
GstRtpCELTDepay *rtpceltdepay;
+ GstRTPBuffer rtp = { NULL, };
rtpceltdepay = GST_RTP_CELT_DEPAY (depayload);
clock_rate = depayload->clock_rate;
timestamp = GST_BUFFER_TIMESTAMP (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
GST_LOG_OBJECT (depayload, "got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf),
- gst_rtp_buffer_get_marker (buf),
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf),
+ gst_rtp_buffer_get_marker (&rtp),
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
GST_LOG_OBJECT (depayload, "got clock-rate=%d, frame_size=%d, "
"_ns=%" GST_TIME_FORMAT ", timestamp=%" GST_TIME_FORMAT, clock_rate,
frame_size, GST_TIME_ARGS (framesize_ns), GST_TIME_ARGS (timestamp));
- payload = gst_rtp_buffer_get_payload (buf);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* first count how many bytes are consumed by the size headers and make offset
* point to the first data byte */
total_size += size + 1;
} while (s == 0xff);
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, offset, size);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, offset, size);
offset += size;
if (frame_size != -1 && clock_rate != -1) {
gst_base_rtp_depayload_push (depayload, outbuf);
}
+ gst_rtp_buffer_unmap (&rtp);
+
return NULL;
}
static gboolean gst_rtp_celt_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
static GstCaps *gst_rtp_celt_pay_getcaps (GstBaseRTPPayload * payload,
- GstPad * pad);
+ GstPad * pad, GstCaps * filter);
static GstFlowReturn gst_rtp_celt_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpCELTPay, gst_rtp_celt_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_celt_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_celt_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_celt_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP CELT payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes CELT audio into a RTP packet",
- "Wim Taymans <wim.taymans@gmail.com>");
-
- GST_DEBUG_CATEGORY_INIT (rtpceltpay_debug, "rtpceltpay", 0,
- "CELT RTP Payloader");
-}
+#define gst_rtp_celt_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpCELTPay, gst_rtp_celt_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_celt_pay_class_init (GstRtpCELTPayClass * klass)
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpceltpay_debug, "rtpceltpay", 0,
+ "CELT RTP Payloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstelement_class->change_state = gst_rtp_celt_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP CELT payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes CELT audio into a RTP packet",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_celt_pay_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_celt_pay_getcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_celt_pay_handle_buffer;
}
static void
-gst_rtp_celt_pay_init (GstRtpCELTPay * rtpceltpay, GstRtpCELTPayClass * klass)
+gst_rtp_celt_pay_init (GstRtpCELTPay * rtpceltpay)
{
rtpceltpay->queue = g_queue_new ();
}
static GstCaps *
-gst_rtp_celt_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
+gst_rtp_celt_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
guint8 *payload, *spayload;
guint payload_len;
GstClockTime duration;
+ GstRTPBuffer rtp = { NULL, };
payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
duration = rtpceltpay->qduration;
GST_BUFFER_DURATION (outbuf) = duration;
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* point to the payload for size headers and data */
- spayload = gst_rtp_buffer_get_payload (outbuf);
+ spayload = gst_rtp_buffer_get_payload (&rtp);
payload = spayload + rtpceltpay->sbytes;
while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
/* write the size to the header */
- size = GST_BUFFER_SIZE (buf);
+ size = gst_buffer_get_size (buf);
while (size > 0xff) {
*spayload++ = 0xff;
size -= 0xff;
}
*spayload++ = size;
- size = GST_BUFFER_SIZE (buf);
/* copy payload */
- memcpy (payload, GST_BUFFER_DATA (buf), size);
+ size = gst_buffer_get_size (buf);
+ gst_buffer_extract (buf, 0, payload, size);
payload += size;
gst_buffer_unref (buf);
}
+ gst_rtp_buffer_unmap (&rtp);
/* we consumed it all */
rtpceltpay->bytes = 0;
{
GstFlowReturn ret;
GstRtpCELTPay *rtpceltpay;
- guint size, payload_len;
+ gsize size, payload_len;
guint8 *data;
GstClockTime duration, packet_dur;
guint i, ssize, packet_len;
ret = GST_FLOW_OK;
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
switch (rtpceltpay->packet) {
case 0:
if (!gst_rtp_celt_pay_parse_ident (rtpceltpay, data, size))
goto parse_error;
- goto done;
+ goto cleanup;
case 1:
/* comment packet, we ignore it */
- goto done;
+ goto cleanup;
default:
/* other packets go in the payload */
break;
}
+ gst_buffer_unmap (buffer, data, size);
duration = GST_BUFFER_DURATION (buffer);
return ret;
/* ERRORS */
+cleanup:
+ {
+ gst_buffer_unmap (buffer, data, size);
+ goto done;
+ }
parse_error:
{
GST_ELEMENT_ERROR (rtpceltpay, STREAM, DECODE, (NULL),
("Error parsing first identification packet."));
+ gst_buffer_unmap (buffer, data, size);
return GST_FLOW_ERROR;
}
}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-/* Element-Checklist-Version: 5 */
-
-#include "gstrtpdepay.h"
-
-GST_DEBUG_CATEGORY_STATIC (rtpdepay_debug);
-#define GST_CAT_DEFAULT (rtpdepay_debug)
-
-static GstStaticPadTemplate gst_rtp_depay_src_rtp_template =
-GST_STATIC_PAD_TEMPLATE ("srcrtp",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("application/x-rtp")
- );
-
-static GstStaticPadTemplate gst_rtp_depay_src_rtcp_template =
-GST_STATIC_PAD_TEMPLATE ("srcrtcp",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("application/x-rtcp")
- );
-
-static GstStaticPadTemplate gst_rtp_depay_sink_rtp_template =
-GST_STATIC_PAD_TEMPLATE ("sinkrtp",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("application/x-rtp")
- );
-
-static GstStaticPadTemplate gst_rtp_depay_sink_rtcp_template =
-GST_STATIC_PAD_TEMPLATE ("sinkrtcp",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("application/x-rtcp")
- );
-
-static GstCaps *gst_rtp_depay_getcaps (GstPad * pad);
-static GstFlowReturn gst_rtp_depay_chain_rtp (GstPad * pad, GstBuffer * buffer);
-static GstFlowReturn gst_rtp_depay_chain_rtcp (GstPad * pad,
- GstBuffer * buffer);
-
-GST_BOILERPLATE (GstRTPDepay, gst_rtp_depay, GstElement, GST_TYPE_ELEMENT);
-
-static void
-gst_rtp_depay_base_init (gpointer klass)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_rtp_depay_src_rtp_template));
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_rtp_depay_src_rtcp_template));
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_rtp_depay_sink_rtp_template));
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_rtp_depay_sink_rtcp_template));
- gst_element_class_set_details_simple (gstelement_class,
- "Dummy RTP session manager", "Codec/Depayloader/Network/RTP",
- "Accepts raw RTP and RTCP packets and sends them forward",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_depay_class_init (GstRTPDepayClass * klass)
-{
- GST_DEBUG_CATEGORY_INIT (rtpdepay_debug, "rtpdepay", 0, "RTP decoder");
-}
-
-static void
-gst_rtp_depay_init (GstRTPDepay * rtpdepay, GstRTPDepayClass * klass)
-{
- /* the input rtp pad */
- rtpdepay->sink_rtp =
- gst_pad_new_from_static_template (&gst_rtp_depay_sink_rtp_template,
- "sinkrtp");
- gst_element_add_pad (GST_ELEMENT (rtpdepay), rtpdepay->sink_rtp);
- gst_pad_set_getcaps_function (rtpdepay->sink_rtp, gst_rtp_depay_getcaps);
- gst_pad_set_chain_function (rtpdepay->sink_rtp, gst_rtp_depay_chain_rtp);
-
- /* the input rtcp pad */
- rtpdepay->sink_rtcp =
- gst_pad_new_from_static_template (&gst_rtp_depay_sink_rtcp_template,
- "sinkrtcp");
- gst_element_add_pad (GST_ELEMENT (rtpdepay), rtpdepay->sink_rtcp);
- gst_pad_set_chain_function (rtpdepay->sink_rtcp, gst_rtp_depay_chain_rtcp);
-
- /* the output rtp pad */
- rtpdepay->src_rtp =
- gst_pad_new_from_static_template (&gst_rtp_depay_src_rtp_template,
- "srcrtp");
- gst_pad_set_getcaps_function (rtpdepay->src_rtp, gst_rtp_depay_getcaps);
- gst_element_add_pad (GST_ELEMENT (rtpdepay), rtpdepay->src_rtp);
-
- /* the output rtcp pad */
- rtpdepay->src_rtcp =
- gst_pad_new_from_static_template (&gst_rtp_depay_src_rtcp_template,
- "srcrtcp");
- gst_element_add_pad (GST_ELEMENT (rtpdepay), rtpdepay->src_rtcp);
-}
-
-static GstCaps *
-gst_rtp_depay_getcaps (GstPad * pad)
-{
- GstRTPDepay *src;
- GstPad *other;
- GstCaps *caps;
-
- src = GST_RTP_DEPAY (GST_PAD_PARENT (pad));
-
- other = pad == src->src_rtp ? src->sink_rtp : src->src_rtp;
-
- caps = gst_pad_peer_get_caps (other);
-
- if (caps == NULL)
- caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
-
- return caps;
-}
-
-static GstFlowReturn
-gst_rtp_depay_chain_rtp (GstPad * pad, GstBuffer * buffer)
-{
- GstRTPDepay *src;
-
- src = GST_RTP_DEPAY (GST_PAD_PARENT (pad));
-
- GST_DEBUG ("got rtp packet");
- return gst_pad_push (src->src_rtp, buffer);
-}
-
-static GstFlowReturn
-gst_rtp_depay_chain_rtcp (GstPad * pad, GstBuffer * buffer)
-{
- GST_DEBUG ("got rtcp packet");
-
- gst_buffer_unref (buffer);
- return GST_FLOW_OK;
-}
-
-gboolean
-gst_rtp_depay_plugin_init (GstPlugin * plugin)
-{
- return gst_element_register (plugin, "rtpdepay",
- GST_RANK_SECONDARY, GST_TYPE_RTP_DEPAY);
-}
+++ /dev/null
-/* GStreamer
- * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_RTP_DEPAY_H__
-#define __GST_RTP_DEPAY_H__
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_RTP_DEPAY (gst_rtp_depay_get_type())
-#define GST_IS_RTP_DEPAY(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_DEPAY))
-#define GST_IS_RTP_DEPAY_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_DEPAY))
-#define GST_RTP_DEPAY(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_DEPAY, GstRTPDepay))
-#define GST_RTP_DEPAY_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_DEPAY, GstRTPDepayClass))
-
-typedef struct _GstRTPDepay GstRTPDepay;
-typedef struct _GstRTPDepayClass GstRTPDepayClass;
-
-struct _GstRTPDepay {
- GstElement element;
-
- GstPad *sink_rtp;
- GstPad *sink_rtcp;
- GstPad *src_rtp;
- GstPad *src_rtcp;
-};
-
-struct _GstRTPDepayClass {
- GstElementClass parent_class;
-};
-
-gboolean gst_rtp_depay_plugin_init (GstPlugin * plugin);
-
-GType gst_rtp_depay_get_type(void);
-
-G_END_DECLS
-
-#endif /* __GST_RTP_DEPAY_H__ */
static gboolean gst_rtp_dv_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPDVDepay, gst_rtp_dv_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD)
+#define gst_rtp_dv_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDVDepay, gst_rtp_dv_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
- static void gst_rtp_dv_depay_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_factory));
-
- gst_element_class_set_details_simple (element_class, "RTP DV Depayloader",
- "Codec/Depayloader/Network/RTP",
- "Depayloads DV from RTP packets (RFC 3189)",
- "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
-}
-/* initialize the plugin's class */
static void
gst_rtp_dv_depay_class_init (GstRTPDVDepayClass * klass)
{
GstBaseRTPDepayloadClass *gstbasertpdepayload_class =
(GstBaseRTPDepayloadClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (rtpdvdepay_debug, "rtpdvdepay", 0,
+ "DV RTP Depayloader");
+
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_change_state);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP DV Depayloader",
+ "Codec/Depayloader/Network/RTP",
+ "Depayloads DV from RTP packets (RFC 3189)",
+ "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertpdepayload_class->process =
GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_process);
gstbasertpdepayload_class->set_caps =
GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_setcaps);
-
- GST_DEBUG_CATEGORY_INIT (rtpdvdepay_debug, "rtpdvdepay", 0,
- "DV RTP Depayloader");
}
/* initialize the new element
* initialize structure
*/
static void
-gst_rtp_dv_depay_init (GstRTPDVDepay * filter, GstRTPDVDepayClass * klass)
+gst_rtp_dv_depay_init (GstRTPDVDepay * filter)
{
}
gint clock_rate;
gboolean systemstream, ret;
const gchar *encode, *media;
+ guint8 *data;
+ gsize size;
rtpdvdepay = GST_RTP_DV_DEPAY (depayload);
/* Initialize the new accumulator frame.
* If the previous frame exists, copy that into the accumulator frame.
* This way, missing packets in the stream won't show up badly. */
- memset (GST_BUFFER_DATA (rtpdvdepay->acc), 0, rtpdvdepay->frame_size);
+ data = gst_buffer_map (rtpdvdepay->acc, &size, NULL, GST_MAP_WRITE);
+ memset (data, 0, rtpdvdepay->frame_size);
+ gst_buffer_unmap (rtpdvdepay->acc, data, size);
srccaps = gst_caps_new_simple ("video/x-dv",
"systemstream", G_TYPE_BOOLEAN, systemstream,
guint payload_len, location;
GstRTPDVDepay *dvdepay = GST_RTP_DV_DEPAY (base);
gboolean marker;
+ GstRTPBuffer rtp = { NULL, };
+
+ gst_rtp_buffer_map (in, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (in);
+ marker = gst_rtp_buffer_get_marker (&rtp);
/* Check if the received packet contains (the start of) a new frame, we do
* this by checking the RTP timestamp. */
- rtp_ts = gst_rtp_buffer_get_timestamp (in);
+ rtp_ts = gst_rtp_buffer_get_timestamp (&rtp);
/* we cannot copy the packet yet if the marker is set, we will do that below
* after taking out the data */
}
/* Extract the payload */
- payload_len = gst_rtp_buffer_get_payload_len (in);
- payload = gst_rtp_buffer_get_payload (in);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* copy all DIF chunks in their place. */
while (payload_len >= 80) {
/* And copy it in, provided the location is sane. */
if (offset >= 0 && offset <= dvdepay->frame_size - 80)
- memcpy (GST_BUFFER_DATA (dvdepay->acc) + offset, payload, 80);
+ gst_buffer_fill (dvdepay->acc, offset, payload, 80);
payload += 80;
payload_len -= 80;
}
+ gst_rtp_buffer_unmap (&rtp);
if (marker) {
GST_DEBUG_OBJECT (dvdepay, "marker bit complete frame %u", rtp_ts);
static void gst_dv_pay_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstRTPDVPay, gst_rtp_dv_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_dv_pay_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dv_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dv_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP DV Payloader",
- "Codec/Payloader/Network/RTP",
- "Payloads DV into RTP packets (RFC 3189)",
- "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_dv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDVPay, gst_rtp_dv_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_dv_pay_class_init (GstRTPDVPayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpdvpay_debug, "rtpdvpay", 0, "DV RTP Payloader");
+
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->set_property = gst_dv_pay_set_property;
gobject_class->get_property = gst_dv_pay_get_property;
- gstbasertppayload_class->set_caps = gst_rtp_dv_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_dv_pay_handle_buffer;
-
g_object_class_install_property (gobject_class, PROP_MODE,
g_param_spec_enum ("mode", "Mode",
"The payload mode of payloading",
GST_TYPE_DV_PAY_MODE, DEFAULT_MODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- GST_DEBUG_CATEGORY_INIT (rtpdvpay_debug, "rtpdvpay", 0, "DV RTP Payloader");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dv_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dv_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP DV Payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payloads DV into RTP packets (RFC 3189)",
+ "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
+
+ gstbasertppayload_class->set_caps = gst_rtp_dv_pay_setcaps;
+ gstbasertppayload_class->handle_buffer = gst_rtp_dv_pay_handle_buffer;
}
static void
-gst_rtp_dv_pay_init (GstRTPDVPay * rtpdvpay, GstRTPDVPayClass * klass)
+gst_rtp_dv_pay_init (GstRTPDVPay * rtpdvpay)
{
}
}
static gboolean
-gst_dv_pay_negotiate (GstRTPDVPay * rtpdvpay, guint8 * data, guint size)
+gst_dv_pay_negotiate (GstRTPDVPay * rtpdvpay, guint8 * data, gsize size)
{
const gchar *encode, *media;
gboolean audio_bundled, res;
GstBuffer *outbuf;
GstFlowReturn ret = GST_FLOW_OK;
gint hdrlen;
- guint size;
- guint8 *data;
+ gsize size, osize;
+ guint8 *data, *odata;
guint8 *dest;
guint filled;
+ GstRTPBuffer rtp = { NULL, };
rtpdvpay = GST_RTP_DV_PAY (basepayload);
max_payload_size = ((GST_BASE_RTP_PAYLOAD_MTU (rtpdvpay) - hdrlen) / 80) * 80;
/* The length of the buffer to transmit. */
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
+ odata = data;
+ osize = size;
GST_DEBUG_OBJECT (rtpdvpay,
"DV RTP payloader got buffer of %u bytes, splitting in %u byte "
if (outbuf == NULL) {
outbuf = gst_rtp_buffer_new_allocate (max_payload_size, 0, 0);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
- dest = gst_rtp_buffer_get_payload (outbuf);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ dest = gst_rtp_buffer_get_payload (&rtp);
filled = 0;
}
guint hlen;
/* set marker */
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
/* shrink buffer to last packet */
- hlen = gst_rtp_buffer_get_header_len (outbuf);
- gst_rtp_buffer_set_packet_len (outbuf, hlen + filled);
+ hlen = gst_rtp_buffer_get_header_len (&rtp);
+ gst_rtp_buffer_set_packet_len (&rtp, hlen + filled);
}
+
/* Push out the created piece, and check for errors. */
+ gst_rtp_buffer_unmap (&rtp);
ret = gst_basertppayload_push (basepayload, outbuf);
if (ret != GST_FLOW_OK)
break;
outbuf = NULL;
}
}
+ gst_buffer_unmap (buffer, odata, osize);
gst_buffer_unref (buffer);
return ret;
)
);
-GST_BOILERPLATE (GstRtpG722Depay, gst_rtp_g722_depay, GstBaseRTPDepayload,
+#define gst_rtp_g722_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG722Depay, gst_rtp_g722_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_g722_depay_setcaps (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_g722_depay_base_init (gpointer klass)
+gst_rtp_g722_depay_class_init (GstRtpG722DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg722depay_debug, "rtpg722depay", 0,
+ "G722 RTP Depayloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g722_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g722_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP audio depayloader",
- "Codec/Depayloader/Network/RTP",
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP audio depayloader", "Codec/Depayloader/Network/RTP",
"Extracts G722 audio from RTP packets",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_g722_depay_class_init (GstRtpG722DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->set_caps = gst_rtp_g722_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_g722_depay_process;
-
- GST_DEBUG_CATEGORY_INIT (rtpg722depay_debug, "rtpg722depay", 0,
- "G722 RTP Depayloader");
}
static void
-gst_rtp_g722_depay_init (GstRtpG722Depay * rtpg722depay,
- GstRtpG722DepayClass * klass)
+gst_rtp_g722_depay_init (GstRtpG722Depay * rtpg722depay)
{
- /* needed because of GST_BOILERPLATE */
}
static gint
GstBuffer *outbuf;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
rtpg722depay = GST_RTP_G722_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 0)
goto empty_packet;
GST_DEBUG_OBJECT (rtpg722depay, "got payload of %d bytes", payload_len);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark talk spurt with DISCONT */
{
GST_ELEMENT_WARNING (rtpg722depay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static gboolean gst_rtp_g722_pay_setcaps (GstBaseRTPPayload * basepayload,
GstCaps * caps);
static GstCaps *gst_rtp_g722_pay_getcaps (GstBaseRTPPayload * rtppayload,
- GstPad * pad);
+ GstPad * pad, GstCaps * filter);
-GST_BOILERPLATE (GstRtpG722Pay, gst_rtp_g722_pay, GstBaseRTPAudioPayload,
+#define gst_rtp_g722_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG722Pay, gst_rtp_g722_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_g722_pay_base_init (gpointer klass)
+gst_rtp_g722_pay_class_init (GstRtpG722PayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg722pay_debug, "rtpg722pay", 0,
+ "G722 RTP Payloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g722_pay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g722_pay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP audio payloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP audio payloader",
"Codec/Payloader/Network/RTP",
"Payload-encode Raw audio into RTP packets (RFC 3551)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_g722_pay_class_init (GstRtpG722PayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_g722_pay_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_g722_pay_getcaps;
-
- GST_DEBUG_CATEGORY_INIT (rtpg722pay_debug, "rtpg722pay", 0,
- "G722 RTP Payloader");
}
static void
-gst_rtp_g722_pay_init (GstRtpG722Pay * rtpg722pay, GstRtpG722PayClass * klass)
+gst_rtp_g722_pay_init (GstRtpG722Pay * rtpg722pay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
}
static GstCaps *
-gst_rtp_g722_pay_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
+gst_rtp_g722_pay_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
static GstBuffer *gst_rtp_g723_depay_process (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpG723Depay, gst_rtp_g723_depay, GstBaseRTPDepayload,
+#define gst_rtp_g723_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG723Depay, gst_rtp_g723_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_g723_depay_base_init (gpointer klass)
+gst_rtp_g723_depay_class_init (GstRtpG723DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg723depay_debug, "rtpg723depay", 0,
+ "G.723 RTP Depayloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g723_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g723_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP G.723 depayloader",
- "Codec/Depayloader/Network/RTP",
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP G.723 depayloader", "Codec/Depayloader/Network/RTP",
"Extracts G.723 audio from RTP packets (RFC 3551)",
"Wim Taymans <wim.taymans@gmail.com>");
- GST_DEBUG_CATEGORY_INIT (rtpg723depay_debug, "rtpg723depay", 0,
- "G.723 RTP Depayloader");
-}
-
-static void
-gst_rtp_g723_depay_class_init (GstRtpG723DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
gstbasertpdepayload_class->process = gst_rtp_g723_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_g723_depay_setcaps;
}
static void
-gst_rtp_g723_depay_init (GstRtpG723Depay * rtpg723depay,
- GstRtpG723DepayClass * klass)
+gst_rtp_g723_depay_init (GstRtpG723Depay * rtpg723depay)
{
GstBaseRTPDepayload *depayload;
GstBuffer *outbuf = NULL;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
rtpg723depay = GST_RTP_G723_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* At least 4 bytes */
if (payload_len < 4)
GST_LOG_OBJECT (rtpg723depay, "payload len %d", payload_len);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (marker) {
/* marker bit starts talkspurt */
}
GST_LOG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_buffer_get_size (outbuf));
return outbuf;
bad_packet:
{
/* no fatal error */
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstStateChangeReturn gst_rtp_g723_pay_change_state (GstElement * element,
GstStateChange transition);
-GST_BOILERPLATE (GstRTPG723Pay, gst_rtp_g723_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_g723_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g723_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g723_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP G.723 payloader",
- "Codec/Payloader/Network/RTP",
- "Packetize G.723 audio into RTP packets",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_g723_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPG723Pay, gst_rtp_g723_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_g723_pay_class_init (GstRTPG723PayClass * klass)
gstelement_class->change_state = gst_rtp_g723_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g723_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g723_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP G.723 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Packetize G.723 audio into RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
payload_class->set_caps = gst_rtp_g723_pay_set_caps;
payload_class->handle_buffer = gst_rtp_g723_pay_handle_buffer;
}
static void
-gst_rtp_g723_pay_init (GstRTPG723Pay * pay, GstRTPG723PayClass * klass)
+gst_rtp_g723_pay_init (GstRTPG723Pay * pay)
{
GstBaseRTPPayload *payload = GST_BASE_RTP_PAYLOAD (pay);
GstFlowReturn ret;
guint8 *payload;
guint avail;
+ GstRTPBuffer rtp = { NULL };
avail = gst_adapter_available (pay->adapter);
outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);
- payload = gst_rtp_buffer_get_payload (outbuf);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = pay->timestamp;
GST_BUFFER_DURATION (outbuf) = pay->duration;
/* set discont and marker */
if (pay->discont) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
pay->discont = FALSE;
}
+ gst_rtp_buffer_unmap (&rtp);
ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (pay), outbuf);
{
GstFlowReturn ret = GST_FLOW_OK;
guint8 *data;
- guint size;
+ gsize size;
guint8 HDR;
GstRTPG723Pay *pay;
GstClockTime packet_dur, timestamp;
pay = GST_RTP_G723_PAY (payload);
- size = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buf);
if (GST_BUFFER_IS_DISCONT (buf)) {
else
pay->timestamp = 0;
}
+ gst_buffer_unmap (buf, data, size);
/* add packet to the queue */
gst_adapter_push (pay->adapter, buf);
GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE,
("Invalid input buffer size"),
("Input size should be 4, 20 or 24, got %u", size));
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return GST_FLOW_OK;
}
GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE,
("Wrong input buffer size"),
("Expected input buffer size %u but got %u", size_tab[HDR], size));
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return GST_FLOW_OK;
}
static gboolean gst_rtp_g726_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpG726Depay, gst_rtp_g726_depay, GstBaseRTPDepayload,
+#define gst_rtp_g726_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG726Depay, gst_rtp_g726_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_g726_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g726_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g726_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP G.726 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts G.726 audio from RTP packets",
- "Axis Communications <dev-gstreamer@axis.com>");
-}
-
-static void
gst_rtp_g726_depay_class_init (GstRtpG726DepayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpg726depay_debug, "rtpg726depay", 0,
+ "G.726 RTP Depayloader");
+
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gobject_class->set_property = gst_rtp_g726_depay_set_property;
"Force AAL2 decoding for compatibility with bad payloaders",
DEFAULT_FORCE_AAL2, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP G.726 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts G.726 audio from RTP packets",
+ "Axis Communications <dev-gstreamer@axis.com>");
+
gstbasertpdepayload_class->process = gst_rtp_g726_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_g726_depay_setcaps;
-
- GST_DEBUG_CATEGORY_INIT (rtpg726depay_debug, "rtpg726depay", 0,
- "G.726 RTP Depayloader");
}
static void
-gst_rtp_g726_depay_init (GstRtpG726Depay * rtpG726depay,
- GstRtpG726DepayClass * klass)
+gst_rtp_g726_depay_init (GstRtpG726Depay * rtpG726depay)
{
GstBaseRTPDepayload *depayload;
GstRtpG726Depay *depay;
GstBuffer *outbuf = NULL;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
depay = GST_RTP_G726_DEPAY (depayload);
- marker = gst_rtp_buffer_get_marker (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtp);
+
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
if (depay->aal2 || depay->force_aal2) {
/* AAL2, we can just copy the bytes */
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
if (!outbuf)
goto bad_len;
} else {
- guint8 *in, *out, tmp;
+ guint8 *in, *out, tmp, *odata;
guint len;
+ gsize osize;
- in = gst_rtp_buffer_get_payload (buf);
- len = gst_rtp_buffer_get_payload_len (buf);
-
- if (gst_buffer_is_writable (buf)) {
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- } else {
- GstBuffer *copy;
-
- /* copy buffer */
- copy = gst_buffer_copy (buf);
- outbuf = gst_rtp_buffer_get_payload_buffer (copy);
- gst_buffer_unref (copy);
- }
+ in = gst_rtp_buffer_get_payload (&rtp);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
if (!outbuf)
goto bad_len;
+ outbuf = gst_buffer_make_writable (outbuf);
- out = GST_BUFFER_DATA (outbuf);
+ odata = gst_buffer_map (outbuf, &osize, NULL, GST_MAP_WRITE);
+ out = odata;
/* we need to reshuffle the bytes, input is always of the form
* A B C D ... with the number of bits depending on the bitrate. */
break;
}
}
+ gst_buffer_unmap (outbuf, odata, osize);
}
if (marker) {
static GstFlowReturn gst_rtp_g726_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpG726Pay, gst_rtp_g726_pay, GstBaseRTPAudioPayload,
+#define gst_rtp_g726_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG726Pay, gst_rtp_g726_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_g726_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g726_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g726_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP G.726 payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes G.726 audio into a RTP packet",
- "Axis Communications <dev-gstreamer@axis.com>");
-}
-
-static void
gst_rtp_g726_pay_class_init (GstRtpG726PayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->set_property = gst_rtp_g726_pay_set_property;
"Force AAL2 encoding for compatibility with bad depayloaders",
DEFAULT_FORCE_AAL2, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP G.726 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes G.726 audio into a RTP packet",
+ "Axis Communications <dev-gstreamer@axis.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_g726_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_g726_pay_handle_buffer;
}
static void
-gst_rtp_g726_pay_init (GstRtpG726Pay * rtpg726pay, GstRtpG726PayClass * klass)
+gst_rtp_g726_pay_init (GstRtpG726Pay * rtpg726pay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
GST_DEBUG_OBJECT (payload, "selected base encoding %s", encoding_name);
/* now see if we need to produce AAL2 or not */
- peercaps = gst_pad_peer_get_caps (payload->srcpad);
+ peercaps = gst_pad_peer_get_caps (payload->srcpad, NULL);
if (peercaps) {
GstCaps *filter, *intersect;
gchar *capsstr;
if (!pay->aal2) {
guint8 *data, tmp;
- guint len;
+ gsize len;
/* for non AAL2, we need to reshuffle the bytes, we can do this in-place
* when the buffer is writable. */
buffer = gst_buffer_make_writable (buffer);
- data = GST_BUFFER_DATA (buffer);
- len = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &len, NULL, GST_MAP_READWRITE);
GST_LOG_OBJECT (pay, "packing %u bytes of data", len);
break;
}
}
+ gst_buffer_unmap (buffer, data, len);
}
res =
static GstBuffer *gst_rtp_g729_depay_process (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpG729Depay, gst_rtp_g729_depay, GstBaseRTPDepayload,
+#define gst_rtp_g729_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG729Depay, gst_rtp_g729_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_g729_depay_base_init (gpointer klass)
+gst_rtp_g729_depay_class_init (GstRtpG729DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg729depay_debug, "rtpg729depay", 0,
+ "G.729 RTP Depayloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g729_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g729_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP G.729 depayloader",
- "Codec/Depayloader/Network/RTP",
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP G.729 depayloader", "Codec/Depayloader/Network/RTP",
"Extracts G.729 audio from RTP packets (RFC 3551)",
"Laurent Glayal <spglegle@yahoo.fr>");
- GST_DEBUG_CATEGORY_INIT (rtpg729depay_debug, "rtpg729depay", 0,
- "G.729 RTP Depayloader");
-}
-
-static void
-gst_rtp_g729_depay_class_init (GstRtpG729DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
gstbasertpdepayload_class->process = gst_rtp_g729_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_g729_depay_setcaps;
}
static void
-gst_rtp_g729_depay_init (GstRtpG729Depay * rtpg729depay,
- GstRtpG729DepayClass * klass)
+gst_rtp_g729_depay_init (GstRtpG729Depay * rtpg729depay)
{
GstBaseRTPDepayload *depayload;
}
}
-
static GstBuffer *
gst_rtp_g729_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf = NULL;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
rtpg729depay = GST_RTP_G729_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* At least 2 bytes (CNG from G729 Annex B) */
if (payload_len < 2) {
GST_LOG_OBJECT (rtpg729depay, "G729 payload contains CNG frame");
}
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+
+ gst_rtp_buffer_unmap (&rtp);
if (marker) {
/* marker bit starts talkspurt */
}
GST_LOG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_buffer_get_size (outbuf));
return outbuf;
"clock-rate = (int) 8000, " "encoding-name = (string) \"G729\"")
);
-GST_BOILERPLATE (GstRTPG729Pay, gst_rtp_g729_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_g729_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g729_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g729_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP G.729 payloader",
- "Codec/Payloader/Network/RTP",
- "Packetize G.729 audio into RTP packets",
- "Olivier Crete <olivier.crete@collabora.co.uk>");
-
- GST_DEBUG_CATEGORY_INIT (rtpg729pay_debug, "rtpg729pay", 0,
- "G.729 RTP Payloader");
-}
+#define gst_rtp_g729_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPG729Pay, gst_rtp_g729_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_g729_pay_finalize (GObject * object)
GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseRTPPayloadClass *payload_class = GST_BASE_RTP_PAYLOAD_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (rtpg729pay_debug, "rtpg729pay", 0,
+ "G.729 RTP Payloader");
+
gobject_class->finalize = gst_rtp_g729_pay_finalize;
gstelement_class->change_state = gst_rtp_g729_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g729_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g729_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP G.729 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Packetize G.729 audio into RTP packets",
+ "Olivier Crete <olivier.crete@collabora.co.uk>");
+
payload_class->set_caps = gst_rtp_g729_pay_set_caps;
payload_class->handle_buffer = gst_rtp_g729_pay_handle_buffer;
}
static void
-gst_rtp_g729_pay_init (GstRTPG729Pay * pay, GstRTPG729PayClass * klass)
+gst_rtp_g729_pay_init (GstRTPG729Pay * pay)
{
GstBaseRTPPayload *payload = GST_BASE_RTP_PAYLOAD (pay);
GstBuffer *outbuf;
guint8 *payload;
GstFlowReturn ret;
+ GstRTPBuffer rtp = { NULL };
basepayload = GST_BASE_RTP_PAYLOAD (rtpg729pay);
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_READWRITE, &rtp);
+
/* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
memcpy (payload, data, payload_len);
/* set metadata */
if (G_UNLIKELY (rtpg729pay->discont)) {
GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
rtpg729pay->discont = FALSE;
}
+ gst_rtp_buffer_unmap (&rtp);
ret = gst_basertppayload_push (basepayload, outbuf);
guint minptime_octets = 0;
guint min_payload_len;
guint max_payload_len;
+ gsize size;
+ GstClockTime timestamp;
- available = GST_BUFFER_SIZE (buf);
+ size = gst_buffer_get_size (buf);
- if (available % G729_FRAME_SIZE != 0 &&
- available % G729_FRAME_SIZE != G729B_CN_FRAME_SIZE)
+ if (size % G729_FRAME_SIZE != 0 &&
+ size % G729_FRAME_SIZE != G729B_CN_FRAME_SIZE)
goto invalid_size;
/* max number of bytes based on given ptime, has to be multiple of
adapter = rtpg729pay->adapter;
available = gst_adapter_available (adapter);
+ timestamp = GST_BUFFER_TIMESTAMP (buf);
+
/* resync rtp time on discont or a discontinuous cn packet */
if (GST_BUFFER_IS_DISCONT (buf)) {
/* flush remainder */
available = 0;
}
rtpg729pay->discont = TRUE;
- gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, GST_BUFFER_TIMESTAMP (buf));
+ gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, timestamp);
}
- if (GST_BUFFER_SIZE (buf) < G729_FRAME_SIZE)
- gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, GST_BUFFER_TIMESTAMP (buf));
+ if (size < G729_FRAME_SIZE)
+ gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, timestamp);
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (rtpg729pay->first_ts))) {
- rtpg729pay->first_ts = GST_BUFFER_TIMESTAMP (buf);
+ rtpg729pay->first_ts = timestamp;
rtpg729pay->first_rtp_time = rtpg729pay->next_rtp_time;
}
/* let's reset the base timestamp when the adapter is empty */
if (available == 0)
- rtpg729pay->next_ts = GST_BUFFER_TIMESTAMP (buf);
+ rtpg729pay->next_ts = timestamp;
- if (available == 0 &&
- GST_BUFFER_SIZE (buf) >= min_payload_len &&
- GST_BUFFER_SIZE (buf) <= max_payload_len) {
- ret = gst_rtp_g729_pay_push (rtpg729pay,
- GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ if (available == 0 && size >= min_payload_len && size <= max_payload_len) {
+ guint8 *data;
+
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ ret = gst_rtp_g729_pay_push (rtpg729pay, data, size);
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return ret;
}
("Invalid input buffer size"),
("Invalid buffer size, should be a multiple of"
" G729_FRAME_SIZE(10) with an optional G729B_CN_FRAME_SIZE(2)"
- " added to it, but it is %u", available));
+ " added to it, but it is %u", size));
gst_buffer_unref (buf);
return GST_FLOW_ERROR;
}
static gboolean gst_rtp_gsm_depay_setcaps (GstBaseRTPDepayload * _depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPGSMDepay, gst_rtp_gsm_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_gsm_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPGSMDepay, gst_rtp_gsm_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_gsm_depay_base_init (gpointer klass)
+gst_rtp_gsm_depay_class_init (GstRTPGSMDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertp_depayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertp_depayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gsm_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gsm_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP GSM depayloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP GSM depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts GSM audio from RTP packets", "Zeeshan Ali <zeenix@gmail.com>");
-}
-
-static void
-gst_rtp_gsm_depay_class_init (GstRTPGSMDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertp_depayload_class;
-
- gstbasertp_depayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertp_depayload_class->process = gst_rtp_gsm_depay_process;
gstbasertp_depayload_class->set_caps = gst_rtp_gsm_depay_setcaps;
}
static void
-gst_rtp_gsm_depay_init (GstRTPGSMDepay * rtpgsmdepay,
- GstRTPGSMDepayClass * klass)
+gst_rtp_gsm_depay_init (GstRTPGSMDepay * rtpgsmdepay)
{
- /* needed because of GST_BOILERPLATE */
}
static gboolean
{
GstBuffer *outbuf = NULL;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark start of talkspurt with DISCONT */
static GstFlowReturn gst_rtp_gsm_pay_handle_buffer (GstBaseRTPPayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRTPGSMPay, gst_rtp_gsm_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
+#define gst_rtp_gsm_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPGSMPay, gst_rtp_gsm_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
-gst_rtp_gsm_pay_base_init (gpointer klass)
+gst_rtp_gsm_pay_class_init (GstRTPGSMPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpgsmpay_debug, "rtpgsmpay", 0,
+ "GSM Audio RTP Payloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gsm_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gsm_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP GSM payloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP GSM payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes GSM audio into a RTP packet",
"Zeeshan Ali <zeenix@gmail.com>");
-}
-
-static void
-gst_rtp_gsm_pay_class_init (GstRTPGSMPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_gsm_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_gsm_pay_handle_buffer;
-
- GST_DEBUG_CATEGORY_INIT (rtpgsmpay_debug, "rtpgsmpay", 0,
- "GSM Audio RTP Payloader");
}
static void
-gst_rtp_gsm_pay_init (GstRTPGSMPay * rtpgsmpay, GstRTPGSMPayClass * klass)
+gst_rtp_gsm_pay_init (GstRTPGSMPay * rtpgsmpay)
{
GST_BASE_RTP_PAYLOAD (rtpgsmpay)->clock_rate = 8000;
GST_BASE_RTP_PAYLOAD_PT (rtpgsmpay) = GST_RTP_PAYLOAD_GSM;
GstBuffer * buffer)
{
GstRTPGSMPay *rtpgsmpay;
- guint size, payload_len;
+ guint payload_len;
GstBuffer *outbuf;
guint8 *payload, *data;
GstClockTime timestamp, duration;
GstFlowReturn ret;
+ gsize size;
+ GstRTPBuffer rtp = { NULL };
rtpgsmpay = GST_RTP_GSM_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
payload_len = size;
/* FIXME, just error out for now */
- if (payload_len > GST_BASE_RTP_PAYLOAD_MTU (rtpgsmpay)) {
- GST_ELEMENT_ERROR (rtpgsmpay, STREAM, ENCODE, (NULL),
- ("payload_len %u > mtu %u", payload_len,
- GST_BASE_RTP_PAYLOAD_MTU (rtpgsmpay)));
- return GST_FLOW_ERROR;
- }
+ if (payload_len > GST_BASE_RTP_PAYLOAD_MTU (rtpgsmpay))
+ goto too_big;
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
GST_BUFFER_DURATION (outbuf) = duration;
/* get payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
-
- data = GST_BUFFER_DATA (buffer);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* copy data in payload */
- memcpy (&payload[0], data, size);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ memcpy (payload, data, size);
+
+ gst_rtp_buffer_unmap (&rtp);
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
GST_DEBUG ("gst_rtp_gsm_pay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_buffer_get_size (outbuf));
ret = gst_basertppayload_push (basepayload, outbuf);
return ret;
+
+ /* ERRORS */
+too_big:
+ {
+ GST_ELEMENT_ERROR (rtpgsmpay, STREAM, ENCODE, (NULL),
+ ("payload_len %u > mtu %u", payload_len,
+ GST_BASE_RTP_PAYLOAD_MTU (rtpgsmpay)));
+ gst_buffer_unmap (buffer, data, size);
+ return GST_FLOW_ERROR;
+ }
}
gboolean
"clock-rate = (int) 90000, " "encoding-name = (string) \"X-GST\"")
);
-GST_BOILERPLATE (GstRtpGSTDepay, gst_rtp_gst_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_gst_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpGSTDepay, gst_rtp_gst_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_gst_depay_finalize (GObject * object);
GstBuffer * buf);
static void
-gst_rtp_gst_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_gst_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_gst_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "GStreamer depayloader", "Codec/Depayloader/Network",
- "Extracts GStreamer buffers from RTP packets",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_gst_depay_class_init (GstRtpGSTDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpgstdepay_debug, "rtpgstdepay", 0,
+ "Gstreamer RTP Depayloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstelement_class->change_state = gst_rtp_gst_depay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gst_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gst_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "GStreamer depayloader", "Codec/Depayloader/Network",
+ "Extracts GStreamer buffers from RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertpdepayload_class->set_caps = gst_rtp_gst_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_gst_depay_process;
-
- GST_DEBUG_CATEGORY_INIT (rtpgstdepay_debug, "rtpgstdepay", 0,
- "Gstreamer RTP Depayloader");
}
static void
-gst_rtp_gst_depay_init (GstRtpGSTDepay * rtpgstdepay,
- GstRtpGSTDepayClass * klass)
+gst_rtp_gst_depay_init (GstRtpGSTDepay * rtpgstdepay)
{
rtpgstdepay->adapter = gst_adapter_new ();
}
gint payload_len;
guint8 *payload;
guint CV;
+ GstRTPBuffer rtp = { NULL };
rtpgstdepay = GST_RTP_GST_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 8)
goto empty_packet;
gst_adapter_clear (rtpgstdepay->adapter);
}
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* strip off header
*
*/
/* subbuffer skipping the 8 header bytes */
- subbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 8, -1);
+ subbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 8, -1);
gst_adapter_push (rtpgstdepay->adapter, subbuf);
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
GstCaps *outcaps;
CV = (payload[0] >> 4) & 0x7;
if (payload[0] & 0x80) {
- guint b, csize, size, offset;
+ guint b, csize, left, offset;
+ gsize size;
guint8 *data;
GstBuffer *subbuf;
/* C bit, we have inline caps */
- data = GST_BUFFER_DATA (outbuf);
- size = GST_BUFFER_SIZE (outbuf);
+ data = gst_buffer_map (outbuf, &size, NULL, GST_MAP_READ);
/* start reading the length, we need this to skip to the data later */
csize = offset = 0;
+ left = size;
do {
- if (offset >= size)
+ if (offset >= left) {
+ gst_buffer_unmap (outbuf, data, size);
goto too_small;
+ }
b = data[offset++];
csize = (csize << 7) | (b & 0x7f);
} while (b & 0x80);
- if (size < csize)
+ if (left < csize) {
+ gst_buffer_unmap (outbuf, data, size);
goto too_small;
+ }
/* parse and store in cache */
outcaps = gst_caps_from_string ((gchar *) & data[offset]);
/* skip caps */
offset += csize;
- size -= csize;
+ left -= csize;
GST_DEBUG_OBJECT (rtpgstdepay,
"inline caps %u, length %u, %" GST_PTR_FORMAT, CV, csize, outcaps);
/* create real data buffer when needed */
if (size)
- subbuf = gst_buffer_create_sub (outbuf, offset, size);
+ subbuf =
+ gst_buffer_copy_region (outbuf, GST_BUFFER_COPY_ALL, offset, left);
else
subbuf = NULL;
+ gst_buffer_unmap (outbuf, data, size);
gst_buffer_unref (outbuf);
outbuf = subbuf;
}
{
GST_ELEMENT_WARNING (rtpgstdepay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
too_small:
("Buffer too small."), (NULL));
if (outbuf)
gst_buffer_unref (outbuf);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
missing_caps:
("Missing caps %u.", CV), (NULL));
if (outbuf)
gst_buffer_unref (outbuf);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstFlowReturn gst_rtp_gst_pay_handle_buffer (GstBaseRTPPayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpGSTPay, gst_rtp_gst_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
+#define gst_rtp_gst_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpGSTPay, gst_rtp_gst_pay, GST_TYPE_BASE_RTP_PAYLOAD);
- static void gst_rtp_gst_pay_base_init (gpointer klass)
+static void
+gst_rtp_gst_pay_class_init (GstRtpGSTPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gst_pay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gst_pay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP GStreamer payloader", "Codec/Payloader/Network/RTP",
"Payload GStreamer buffers as RTP packets",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_gst_pay_class_init (GstRtpGSTPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_gst_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_gst_pay_handle_buffer;
}
static void
-gst_rtp_gst_pay_init (GstRtpGSTPay * rtpgstpay, GstRtpGSTPayClass * klass)
+gst_rtp_gst_pay_init (GstRtpGSTPay * rtpgstpay)
{
}
GstBuffer * buffer)
{
GstRtpGSTPay *rtpgstpay;
- guint8 *data;
- guint size;
+ guint8 *data, *ptr;
+ gsize size, left;
GstBuffer *outbuf;
GstFlowReturn ret;
GstClockTime timestamp;
rtpgstpay = GST_RTP_GST_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
ret = GST_FLOW_OK;
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
frag_offset = 0;
+ ptr = data;
+ left = size;
- while (size > 0) {
+ while (left > 0) {
guint towrite;
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL };
/* this will be the total lenght of the packet */
- packet_len = gst_rtp_buffer_calc_packet_len (8 + size, 0, 0);
+ packet_len = gst_rtp_buffer_calc_packet_len (8 + left, 0, 0);
/* fill one MTU or all available bytes */
towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpgstpay));
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
- payload = gst_rtp_buffer_get_payload (outbuf);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload[0] = flags;
payload[1] = payload[2] = payload[3] = 0;
payload += 8;
payload_len -= 8;
- memcpy (payload, data, payload_len);
+ memcpy (payload, ptr, payload_len);
- data += payload_len;
- size -= payload_len;
+ ptr += payload_len;
+ left -= payload_len;
frag_offset += payload_len;
- if (size == 0)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ if (left == 0)
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
ret = gst_basertppayload_push (basepayload, outbuf);
}
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
return ret;
"clock-rate = (int) 90000, " "encoding-name = (string) \"H263\"")
);
-GST_BOILERPLATE (GstRtpH263Depay, gst_rtp_h263_depay, GstBaseRTPDepayload,
+#define gst_rtp_h263_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263Depay, gst_rtp_h263_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_h263_depay_finalize (GObject * object);
GstCaps * caps);
static void
-gst_rtp_h263_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP H263 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts H263 video from RTP packets (RFC 2190)",
- "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
- "Edward Hervey <bilboed@bilboed.com>");
-}
-
-static void
gst_rtp_h263_depay_class_init (GstRtpH263DepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtph263depay_debug, "rtph263depay", 0,
+ "H263 Video RTP Depayloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gstbasertpdepayload_class->process = gst_rtp_h263_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_h263_depay_setcaps;
-
gobject_class->finalize = gst_rtp_h263_depay_finalize;
gstelement_class->change_state = gst_rtp_h263_depay_change_state;
- GST_DEBUG_CATEGORY_INIT (rtph263depay_debug, "rtph263depay", 0,
- "H263 Video RTP Depayloader");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP H263 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H263 video from RTP packets (RFC 2190)",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
+ "Edward Hervey <bilboed@bilboed.com>");
+
+ gstbasertpdepayload_class->process = gst_rtp_h263_depay_process;
+ gstbasertpdepayload_class->set_caps = gst_rtp_h263_depay_setcaps;
}
static void
-gst_rtp_h263_depay_init (GstRtpH263Depay * rtph263depay,
- GstRtpH263DepayClass * klass)
+gst_rtp_h263_depay_init (GstRtpH263Depay * rtph263depay)
{
rtph263depay->adapter = gst_adapter_new ();
guint SBIT, EBIT;
gboolean F, P, M;
gboolean I;
+ GstRTPBuffer rtp = { NULL };
rtph263depay = GST_RTP_H263_DEPAY (depayload);
rtph263depay->start = FALSE;
}
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
- M = gst_rtp_buffer_get_marker (buf);
+ M = gst_rtp_buffer_get_marker (&rtp);
/* Let's see what mode we are using */
F = (payload[0] & 0x80) == 0x80;
GstBuffer *tmp;
/* Take the entire buffer */
- tmp = gst_rtp_buffer_get_payload_subbuffer (buf, header_len, payload_len);
+ tmp = gst_rtp_buffer_get_payload_subbuffer (&rtp, header_len, payload_len);
gst_adapter_push (rtph263depay->adapter, tmp);
} else {
GstBuffer *tmp;
/* Take the entire buffer except for the last byte */
- tmp = gst_rtp_buffer_get_payload_subbuffer (buf, header_len,
+ tmp = gst_rtp_buffer_get_payload_subbuffer (&rtp, header_len,
payload_len - 1);
gst_adapter_push (rtph263depay->adapter, tmp);
GstBuffer *buf = gst_buffer_new_and_alloc (1);
GST_DEBUG ("Pushing leftover in adapter");
- GST_BUFFER_DATA (buf)[0] = rtph263depay->leftover;
+ gst_buffer_fill (buf, 0, &rtph263depay->leftover, 1);
gst_adapter_push (rtph263depay->adapter, buf);
}
GST_DEBUG ("Pushing out a buffer of %d bytes", avail);
- timestamp = gst_rtp_buffer_get_timestamp (buf);
+ timestamp = gst_rtp_buffer_get_timestamp (&rtp);
gst_base_rtp_depayload_push_ts (depayload, timestamp, outbuf);
rtph263depay->offset = 0;
rtph263depay->leftover = 0;
rtph263depay->start = TRUE;
}
}
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
guint ind);
static void gst_rtp_h263_pay_package_destroy (GstRtpH263PayPackage * pack);
-GST_BOILERPLATE (GstRtpH263Pay, gst_rtp_h263_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_h263_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP H263 packet payloader", "Codec/Payloader/Network/RTP",
- "Payload-encodes H263 video in RTP packets (RFC 2190)",
- "Neil Stratford <neils@vipadia.com>"
- "Dejan Sakelsak <dejan.sakelsak@marand.si>");
-}
+#define gst_rtp_h263_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263Pay, gst_rtp_h263_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_h263_pay_class_init (GstRtpH263PayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->finalize = gst_rtp_h263_pay_finalize;
"Disable packetization modes B and C", DEFAULT_MODE_A,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP H263 packet payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes H263 video in RTP packets (RFC 2190)",
+ "Neil Stratford <neils@vipadia.com>"
+ "Dejan Sakelsak <dejan.sakelsak@marand.si>");
+
GST_DEBUG_CATEGORY_INIT (rtph263pay_debug, "rtph263pay", 0,
"H263 RTP Payloader");
}
static void
-gst_rtp_h263_pay_init (GstRtpH263Pay * rtph263pay, GstRtpH263PayClass * klass)
+gst_rtp_h263_pay_init (GstRtpH263Pay * rtph263pay)
{
rtph263pay->adapter = gst_adapter_new ();
} else {
if (n > rest_bits) {
context->window =
- (context->
- window << rest_bits) | (*context->win_end & (((guint) pow (2.0,
- (double) rest_bits)) - 1));
+ (context->window << rest_bits) | (*context->
+ win_end & (((guint) pow (2.0, (double) rest_bits)) - 1));
n -= rest_bits;
rest_bits = 0;
} else {
guint8 *header;
guint8 *payload;
GstFlowReturn ret;
+ GstRTPBuffer rtp = { NULL };
- header = gst_rtp_buffer_get_payload (package->outbuf);
+ gst_rtp_buffer_map (package->outbuf, GST_MAP_WRITE, &rtp);
+
+ header = gst_rtp_buffer_get_payload (&rtp);
payload = header + package->mode;
switch (package->mode) {
*/
GST_BUFFER_TIMESTAMP (package->outbuf) = rtph263pay->first_ts;
- gst_rtp_buffer_set_marker (package->outbuf, package->marker);
+ gst_rtp_buffer_set_marker (&rtp, package->marker);
if (package->marker)
GST_DEBUG ("Marker set!");
+ gst_rtp_buffer_unmap (&rtp);
+
ret =
gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtph263pay),
package->outbuf);
/* Get a pointer to all the data for the frame */
rtph263pay->data =
- (guint8 *) gst_adapter_peek (rtph263pay->adapter,
+ (guint8 *) gst_adapter_map (rtph263pay->adapter,
rtph263pay->available_data);
/* Picture header */
gst_rtp_h263_pay_boundry_init (&bound, NULL, rtph263pay->data - 1, 0, 0);
context->gobs =
- (GstRtpH263PayGob **) g_malloc0 (format_props[context->
- piclayer->ptype_srcformat][0] * sizeof (GstRtpH263PayGob *));
+ (GstRtpH263PayGob **) g_malloc0 (format_props[context->piclayer->
+ ptype_srcformat][0] * sizeof (GstRtpH263PayGob *));
for (i = 0; i < format_props[context->piclayer->ptype_srcformat][0]; i++) {
end:
gst_rtp_h263_pay_context_destroy (context,
context->piclayer->ptype_srcformat);
- gst_adapter_flush (rtph263pay->adapter, rtph263pay->available_data);
+ gst_adapter_unmap (rtph263pay->adapter, rtph263pay->available_data);
return ret;
}
/* GStreamer
- * Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
)
);
-GST_BOILERPLATE (GstRtpH263PDepay, gst_rtp_h263p_depay, GstBaseRTPDepayload,
+#define gst_rtp_h263p_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263PDepay, gst_rtp_h263p_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_h263p_depay_finalize (GObject * object);
GstCaps * caps);
static void
-gst_rtp_h263p_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263p_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263p_depay_sink_template));
-
-
- gst_element_class_set_details_simple (element_class, "RTP H263 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts H263/+/++ video from RTP packets (RFC 4629)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_h263p_depay_class_init (GstRtpH263PDepayClass * klass)
{
GObjectClass *gobject_class;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gstbasertpdepayload_class->process = gst_rtp_h263p_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_h263p_depay_setcaps;
-
gobject_class->finalize = gst_rtp_h263p_depay_finalize;
gstelement_class->change_state = gst_rtp_h263p_depay_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263p_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263p_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP H263 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H263/+/++ video from RTP packets (RFC 4629)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstbasertpdepayload_class->process = gst_rtp_h263p_depay_process;
+ gstbasertpdepayload_class->set_caps = gst_rtp_h263p_depay_setcaps;
}
static void
-gst_rtp_h263p_depay_init (GstRtpH263PDepay * rtph263pdepay,
- GstRtpH263PDepayClass * klass)
+gst_rtp_h263p_depay_init (GstRtpH263PDepay * rtph263pdepay)
{
rtph263pdepay->adapter = gst_adapter_new ();
}
{
GstRtpH263PDepay *rtph263pdepay;
GstBuffer *outbuf;
+ gint payload_len;
+ guint8 *payload;
+ gboolean P, V, M;
+ guint header_len;
+ guint8 PLEN, PEBIT;
+ GstRTPBuffer rtp = { NULL };
rtph263pdepay = GST_RTP_H263P_DEPAY (depayload);
rtph263pdepay->wait_start = TRUE;
}
- {
- gint payload_len;
- guint8 *payload;
- gboolean P, V, M;
- guint header_len;
- guint8 PLEN, PEBIT;
-
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
-
- header_len = 2;
-
- if (payload_len < header_len)
- goto too_small;
-
- M = gst_rtp_buffer_get_marker (buf);
-
- /* 0 1
- * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- * | RR |P|V| PLEN |PEBIT|
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- P = (payload[0] & 0x04) == 0x04;
- V = (payload[0] & 0x02) == 0x02;
- PLEN = ((payload[0] & 0x1) << 5) | (payload[1] >> 3);
- PEBIT = payload[1] & 0x7;
-
- GST_LOG_OBJECT (depayload, "P %d, V %d, PLEN %d, PEBIT %d", P, V, PLEN,
- PEBIT);
-
- if (V) {
- header_len++;
- }
- if (PLEN) {
- header_len += PLEN;
- }
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- if ((!P && payload_len < header_len) || (P && payload_len < header_len - 2))
- goto too_small;
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ header_len = 2;
- if (P) {
- /* FIXME, have to make the packet writable hear. Better to reset these
- * bytes when we copy the packet below */
- rtph263pdepay->wait_start = FALSE;
- header_len -= 2;
- payload[header_len] = 0;
- payload[header_len + 1] = 0;
- }
+ if (payload_len < header_len)
+ goto too_small;
- if (rtph263pdepay->wait_start)
- goto waiting_start;
+ payload = gst_rtp_buffer_get_payload (&rtp);
- if (payload_len < header_len)
- goto too_small;
+ M = gst_rtp_buffer_get_marker (&rtp);
- /* FIXME do not ignore the VRC header (See RFC 2429 section 4.2) */
- /* FIXME actually use the RTP picture header when it is lost in the network */
- /* for now strip off header */
- payload += header_len;
- payload_len -= header_len;
+ /* 0 1
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | RR |P|V| PLEN |PEBIT|
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ P = (payload[0] & 0x04) == 0x04;
+ V = (payload[0] & 0x02) == 0x02;
+ PLEN = ((payload[0] & 0x1) << 5) | (payload[1] >> 3);
+ PEBIT = payload[1] & 0x7;
- if (M) {
- /* frame is completed: append to previous, push it out */
- guint len, padlen;
- guint avail;
+ GST_LOG_OBJECT (depayload, "P %d, V %d, PLEN %d, PEBIT %d", P, V, PLEN,
+ PEBIT);
- GST_LOG_OBJECT (depayload, "Frame complete");
+ if (V) {
+ header_len++;
+ }
+ if (PLEN) {
+ header_len += PLEN;
+ }
- avail = gst_adapter_available (rtph263pdepay->adapter);
+ if ((!P && payload_len < header_len) || (P && payload_len < header_len - 2))
+ goto too_small;
- len = avail + payload_len;
- padlen = (len % 4) + 4;
- outbuf = gst_buffer_new_and_alloc (len + padlen);
- memset (GST_BUFFER_DATA (outbuf) + len, 0, padlen);
- GST_BUFFER_SIZE (outbuf) = len;
+ if (P) {
+ /* FIXME, have to make the packet writable hear. Better to reset these
+ * bytes when we copy the packet below */
+ rtph263pdepay->wait_start = FALSE;
+ header_len -= 2;
+ payload[header_len] = 0;
+ payload[header_len + 1] = 0;
+ }
- /* prepend previous data */
- if (avail > 0) {
- gst_adapter_copy (rtph263pdepay->adapter, GST_BUFFER_DATA (outbuf), 0,
- avail);
- gst_adapter_flush (rtph263pdepay->adapter, avail);
- }
- memcpy (GST_BUFFER_DATA (outbuf) + avail, payload, payload_len);
+ if (rtph263pdepay->wait_start)
+ goto waiting_start;
- return outbuf;
+ if (payload_len < header_len)
+ goto too_small;
- } else {
- /* frame not completed: store in adapter */
- outbuf = gst_buffer_new_and_alloc (payload_len);
+ /* FIXME do not ignore the VRC header (See RFC 2429 section 4.2) */
+ /* FIXME actually use the RTP picture header when it is lost in the network */
+ /* for now strip off header */
+ payload += header_len;
+ payload_len -= header_len;
+
+ if (M) {
+ /* frame is completed: append to previous, push it out */
+ guint len, padlen;
+ guint avail;
+ guint8 *data;
+
+ GST_LOG_OBJECT (depayload, "Frame complete");
+
+ avail = gst_adapter_available (rtph263pdepay->adapter);
+ len = avail + payload_len;
+ padlen = (len % 4) + 4;
- GST_LOG_OBJECT (depayload, "Frame incomplete, storing %d", payload_len);
+ outbuf = gst_buffer_new_and_alloc (len + padlen);
- memcpy (GST_BUFFER_DATA (outbuf), payload, payload_len);
+ data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
+ memset (data + len, 0, padlen);
- gst_adapter_push (rtph263pdepay->adapter, outbuf);
+ /* prepend previous data */
+ if (avail > 0) {
+ gst_adapter_copy (rtph263pdepay->adapter, data, 0, avail);
+ gst_adapter_flush (rtph263pdepay->adapter, avail);
}
+ memcpy (data + avail, payload, payload_len);
+ gst_buffer_unmap (outbuf, data, len);
+ gst_rtp_buffer_unmap (&rtp);
+
+ return outbuf;
+
+ } else {
+ /* frame not completed: store in adapter */
+ outbuf = gst_buffer_new_and_alloc (payload_len);
+
+ GST_LOG_OBJECT (depayload, "Frame incomplete, storing %d", payload_len);
+ gst_buffer_fill (outbuf, 0, payload, payload_len);
+
+ gst_adapter_push (rtph263pdepay->adapter, outbuf);
+ gst_rtp_buffer_unmap (&rtp);
}
return NULL;
{
GST_ELEMENT_WARNING (rtph263pdepay, STREAM, DECODE,
("Packet payload was too small"), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
waiting_start:
{
GST_DEBUG_OBJECT (rtph263pdepay, "waiting for picture start");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstFlowReturn gst_rtp_h263p_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpH263PPay, gst_rtp_h263p_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_h263p_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263p_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263p_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP H263 payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes H263/+/++ video in RTP packets (RFC 4629)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_h263p_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263PPay, gst_rtp_h263p_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_h263p_pay_class_init (GstRtpH263PPayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->finalize = gst_rtp_h263p_pay_finalize;
DEFAULT_FRAGMENTATION_MODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263p_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263p_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP H263 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes H263/+/++ video in RTP packets (RFC 4629)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtph263ppay_debug, "rtph263ppay",
0, "rtph263ppay (RFC 4629)");
}
static void
-gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay,
- GstRtpH263PPayClass * klass)
+gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay)
{
rtph263ppay->adapter = gst_adapter_new ();
gint header_len;
guint next_gop = 0;
gboolean found_gob = FALSE;
+ GstRTPBuffer rtp = { NULL };
if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
/* start after 1st gop possible */
guint parsed_len = 3;
const guint8 *parse_data = NULL;
- parse_data = gst_adapter_peek (rtph263ppay->adapter, avail);
+ parse_data = gst_adapter_map (rtph263ppay->adapter, avail);
/* Check if we have a gob or eos , eossbs */
/* FIXME EOS and EOSSBS packets should never contain any gobs and vice-versa */
}
parsed_len++;
}
+ gst_adapter_unmap (rtph263ppay->adapter, 0);
}
/* for picture start frames (non-fragmented), we need to remove the first
payload_len = header_len + towrite;
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* last fragment gets the marker bit set */
- gst_rtp_buffer_set_marker (outbuf, avail > towrite ? 0 : 1);
+ gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
gst_adapter_copy (rtph263ppay->adapter, &payload[header_len], 0, towrite);
GST_BUFFER_TIMESTAMP (outbuf) = rtph263ppay->first_timestamp;
GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
+ gst_rtp_buffer_unmap (&rtp);
gst_adapter_flush (rtph263ppay->adapter, towrite);
/* "max-rcmd-nalu-size = (string) ANY " */
);
-GST_BOILERPLATE (GstRtpH264Depay, gst_rtp_h264_depay, GstBaseRTPDepayload,
+#define gst_rtp_h264_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH264Depay, gst_rtp_h264_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_h264_depay_finalize (GObject * object);
GstCaps * caps);
static void
-gst_rtp_h264_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h264_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h264_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP H264 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts H264 video from RTP packets (RFC 3984)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_h264_depay_class_init (GstRtpH264DepayClass * klass)
{
GObjectClass *gobject_class;
"Merge NALU into AU (picture) (deprecated; use caps)",
DEFAULT_ACCESS_UNIT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h264_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h264_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP H264 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H264 video from RTP packets (RFC 3984)",
+ "Wim Taymans <wim.taymans@gmail.com>");
gstelement_class->change_state = gst_rtp_h264_depay_change_state;
gstbasertpdepayload_class->process = gst_rtp_h264_depay_process;
}
static void
-gst_rtp_h264_depay_init (GstRtpH264Depay * rtph264depay,
- GstRtpH264DepayClass * klass)
+gst_rtp_h264_depay_init (GstRtpH264Depay * rtph264depay)
{
rtph264depay->adapter = gst_adapter_new ();
rtph264depay->picture_adapter = gst_adapter_new ();
GstRtpH264Depay *rtph264depay;
const gchar *ps, *profile;
GstBuffer *codec_data;
- guint8 *b64;
+ guint8 *ptr, *data;
gboolean res;
rtph264depay = GST_RTP_H264_DEPAY (depayload);
}
/* we seriously overshoot the length, but it's fine. */
codec_data = gst_buffer_new_and_alloc (len);
- b64 = GST_BUFFER_DATA (codec_data);
+
+ data = gst_buffer_map (codec_data, NULL, NULL, GST_MAP_WRITE);
+ ptr = data;
total = 0;
for (i = 0; params[i]; i++) {
guint save = 0;
gint state = 0;
GST_DEBUG_OBJECT (depayload, "decoding param %d (%s)", i, params[i]);
- memcpy (b64, sync_bytes, sizeof (sync_bytes));
- b64 += sizeof (sync_bytes);
+ memcpy (ptr, sync_bytes, sizeof (sync_bytes));
+ ptr += sizeof (sync_bytes);
len =
- g_base64_decode_step (params[i], strlen (params[i]), b64, &state,
+ g_base64_decode_step (params[i], strlen (params[i]), ptr, &state,
&save);
GST_DEBUG_OBJECT (depayload, "decoded %d bytes", len);
total += len + sizeof (sync_bytes);
- b64 += len;
+ ptr += len;
}
- GST_BUFFER_SIZE (codec_data) = total;
+ gst_buffer_unmap (codec_data, data, total);
g_strfreev (params);
/* keep the codec_data, we need to send it as the first buffer. We cannot
guint8 **sps, **pps;
guint len, num_sps, num_pps;
gint i;
- guint8 *data;
if (ps == NULL)
goto incomplete_caps;
}
codec_data = gst_buffer_new_and_alloc (len);
- data = GST_BUFFER_DATA (codec_data);
+
+ data = ptr = gst_buffer_map (codec_data, NULL, NULL, GST_MAP_WRITE);
/* 8 bits version == 1 */
- *data++ = 1;
+ *ptr++ = 1;
if (profile) {
guint32 profile_id;
/* hex: AVCProfileIndication:8 | profile_compat:8 | AVCLevelIndication:8 */
sscanf (profile, "%6x", &profile_id);
- *data++ = (profile_id >> 16) & 0xff;
- *data++ = (profile_id >> 8) & 0xff;
- *data++ = profile_id & 0xff;
+ *ptr++ = (profile_id >> 16) & 0xff;
+ *ptr++ = (profile_id >> 8) & 0xff;
+ *ptr++ = profile_id & 0xff;
} else {
/* extract from SPS */
- *data++ = sps[0][3];
- *data++ = sps[0][4];
- *data++ = sps[0][5];
+ *ptr++ = sps[0][3];
+ *ptr++ = sps[0][4];
+ *ptr++ = sps[0][5];
}
/* 6 bits reserved | 2 bits lengthSizeMinusOn */
- *data++ = 0xff;
+ *ptr++ = 0xff;
/* 3 bits reserved | 5 bits numOfSequenceParameterSets */
- *data++ = 0xe0 | (num_sps & 0x1f);
+ *ptr++ = 0xe0 | (num_sps & 0x1f);
/* copy all SPS */
for (i = 0; sps[i]; i++) {
len = ((sps[i][0] << 8) | sps[i][1]) + 2;
GST_DEBUG_OBJECT (depayload, "copy SPS %d of length %d", i, len);
- memcpy (data, sps[i], len);
+ memcpy (ptr, sps[i], len);
g_free (sps[i]);
- data += len;
+ ptr += len;
}
g_free (sps);
/* 8 bits numOfPictureParameterSets */
- *data++ = num_pps;
+ *ptr++ = num_pps;
/* copy all PPS */
for (i = 0; pps[i]; i++) {
len = ((pps[i][0] << 8) | pps[i][1]) + 2;
GST_DEBUG_OBJECT (depayload, "copy PPS %d of length %d", i, len);
- memcpy (data, pps[i], len);
+ memcpy (ptr, pps[i], len);
g_free (pps[i]);
- data += len;
+ ptr += len;
}
g_free (pps);
- GST_BUFFER_SIZE (codec_data) = data - GST_BUFFER_DATA (codec_data);
+ gst_buffer_unmap (codec_data, data, ptr - data);
gst_caps_set_simple (srccaps,
"codec_data", GST_TYPE_BUFFER, codec_data, NULL);
{
GstBaseRTPDepayload *depayload = GST_BASE_RTP_DEPAYLOAD (rtph264depay);
gint nal_type;
- guint size;
+ gsize size;
guint8 *data;
GstBuffer *outbuf = NULL;
GstClockTime out_timestamp;
gboolean keyframe, out_keyframe;
- size = GST_BUFFER_SIZE (nal);
+ data = gst_buffer_map (nal, &size, NULL, GST_MAP_READ);
if (G_UNLIKELY (size < 5))
goto short_nal;
- data = GST_BUFFER_DATA (nal);
-
nal_type = data[4] & 0x1f;
GST_DEBUG_OBJECT (rtph264depay, "handle NAL type %d", nal_type);
GST_DEBUG_OBJECT (depayload, "using NAL as output");
outbuf = nal;
}
+ gst_buffer_unmap (nal, data, size);
if (outbuf) {
/* prepend codec_data */
rtph264depay->codec_data = NULL;
out_keyframe = TRUE;
}
- outbuf = gst_buffer_make_metadata_writable (outbuf);
+ outbuf = gst_buffer_make_writable (outbuf);
GST_BUFFER_TIMESTAMP (outbuf) = out_timestamp;
else
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (depayload->srcpad));
-
gst_base_rtp_depayload_push (depayload, outbuf);
}
short_nal:
{
GST_WARNING_OBJECT (depayload, "dropping short NAL");
+ gst_buffer_unmap (nal, data, size);
gst_buffer_unref (nal);
return FALSE;
}
GstRtpH264Depay *rtph264depay;
GstBuffer *outbuf;
guint8 nal_unit_type;
+ GstRTPBuffer rtp = { NULL };
rtph264depay = GST_RTP_H264_DEPAY (depayload);
timestamp = GST_BUFFER_TIMESTAMP (buf);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
GST_DEBUG_OBJECT (rtph264depay, "receiving %d bytes", payload_len);
outsize = nalu_size + sizeof (sync_bytes);
outbuf = gst_buffer_new_and_alloc (outsize);
- outdata = GST_BUFFER_DATA (outbuf);
+
+ outdata = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
if (rtph264depay->byte_stream) {
memcpy (outdata, sync_bytes, sizeof (sync_bytes));
} else {
payload += 2;
payload_len -= 2;
- outdata += sizeof (sync_bytes);
- memcpy (outdata, payload, nalu_size);
+ memcpy (outdata + sizeof (sync_bytes), payload, nalu_size);
+ gst_buffer_unmap (outbuf, outdata, outsize);
gst_adapter_push (rtph264depay->adapter, outbuf);
nalu_size = payload_len;
outsize = nalu_size + sizeof (sync_bytes);
outbuf = gst_buffer_new_and_alloc (outsize);
- outdata = GST_BUFFER_DATA (outbuf);
- outdata += sizeof (sync_bytes);
- memcpy (outdata, payload, nalu_size);
- outdata[0] = nal_header;
+
+ outdata = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
+ memcpy (outdata + sizeof (sync_bytes), payload, nalu_size);
+ outdata[sizeof (sync_bytes)] = nal_header;
+ gst_buffer_unmap (outbuf, outdata, outsize);
GST_DEBUG_OBJECT (rtph264depay, "queueing %d bytes", outsize);
outsize = payload_len;
outbuf = gst_buffer_new_and_alloc (outsize);
- outdata = GST_BUFFER_DATA (outbuf);
+
+ outdata = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
memcpy (outdata, payload, outsize);
+ gst_buffer_unmap (outbuf, outdata, outsize);
GST_DEBUG_OBJECT (rtph264depay, "queueing %d bytes", outsize);
outsize = gst_adapter_available (rtph264depay->adapter);
outbuf = gst_adapter_take_buffer (rtph264depay->adapter, outsize);
- outdata = GST_BUFFER_DATA (outbuf);
+ outdata = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
if (rtph264depay->byte_stream) {
memcpy (outdata, sync_bytes, sizeof (sync_bytes));
} else {
outdata[1] = (outsize >> 16);
outdata[2] = (outsize >> 8);
outdata[3] = (outsize);
+ outsize += 4;
}
+ gst_buffer_unmap (outbuf, outdata, outsize);
+
gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf, timestamp);
}
break;
nalu_size = payload_len;
outsize = nalu_size + sizeof (sync_bytes);
outbuf = gst_buffer_new_and_alloc (outsize);
- outdata = GST_BUFFER_DATA (outbuf);
+
+ outdata = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
if (rtph264depay->byte_stream) {
memcpy (outdata, sync_bytes, sizeof (sync_bytes));
} else {
outdata[2] = nalu_size >> 8;
outdata[3] = nalu_size & 0xff;
}
- outdata += sizeof (sync_bytes);
- memcpy (outdata, payload, nalu_size);
+ memcpy (outdata + sizeof (sync_bytes), payload, nalu_size);
+ gst_buffer_unmap (outbuf, outdata, outsize);
gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf, timestamp);
break;
}
}
+ gst_rtp_buffer_unmap (&rtp);
}
return NULL;
{
GST_ELEMENT_WARNING (rtph264depay, STREAM, DECODE,
(NULL), ("Undefined packet type"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
waiting_start:
{
GST_DEBUG_OBJECT (rtph264depay, "waiting for start");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
not_implemented:
{
GST_ELEMENT_ERROR (rtph264depay, STREAM, FORMAT,
(NULL), ("NAL unit type %d not supported yet", nal_unit_type));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
GstCaps * caps);
static GstFlowReturn gst_rtp_h264_pay_handle_buffer (GstBaseRTPPayload * pad,
GstBuffer * buffer);
-static gboolean gst_rtp_h264_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_h264_pay_handle_event (GstBaseRTPPayload * payload,
+ GstEvent * event);
static GstStateChangeReturn gst_basertppayload_change_state (GstElement *
element, GstStateChange transition);
-GST_BOILERPLATE (GstRtpH264Pay, gst_rtp_h264_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_h264_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h264_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h264_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP H264 payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encode H264 video into RTP packets (RFC 3984)",
- "Laurent Glayal <spglegle@yahoo.fr>");
-}
+#define gst_rtp_h264_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH264Pay, gst_rtp_h264_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_h264_pay_class_init (GstRtpH264PayClass * klass)
gobject_class->finalize = gst_rtp_h264_pay_finalize;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h264_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h264_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP H264 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encode H264 video into RTP packets (RFC 3984)",
+ "Laurent Glayal <spglegle@yahoo.fr>");
+
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_basertppayload_change_state);
}
static void
-gst_rtp_h264_pay_init (GstRtpH264Pay * rtph264pay, GstRtpH264PayClass * klass)
+gst_rtp_h264_pay_init (GstRtpH264Pay * rtph264pay)
{
rtph264pay->queue = g_array_new (FALSE, FALSE, sizeof (guint));
rtph264pay->profile = 0;
GString *sprops;
guint count;
gboolean res;
+ guint8 *data;
+ gsize size;
sprops = g_string_new ("");
count = 0;
for (walk = payloader->sps; walk; walk = g_list_next (walk)) {
GstBuffer *sps_buf = GST_BUFFER_CAST (walk->data);
- set =
- g_base64_encode (GST_BUFFER_DATA (sps_buf), GST_BUFFER_SIZE (sps_buf));
+ data = gst_buffer_map (sps_buf, &size, NULL, GST_MAP_READ);
+ set = g_base64_encode (data, size);
+ gst_buffer_unmap (sps_buf, data, size);
+
g_string_append_printf (sprops, "%s%s", count ? "," : "", set);
g_free (set);
count++;
for (walk = payloader->pps; walk; walk = g_list_next (walk)) {
GstBuffer *pps_buf = GST_BUFFER_CAST (walk->data);
- set =
- g_base64_encode (GST_BUFFER_DATA (pps_buf), GST_BUFFER_SIZE (pps_buf));
+ data = gst_buffer_map (pps_buf, &size, NULL, GST_MAP_READ);
+ set = g_base64_encode (data, size);
+ gst_buffer_unmap (pps_buf, data, size);
+
g_string_append_printf (sprops, "%s%s", count ? "," : "", set);
g_free (set);
count++;
GstRtpH264Pay *rtph264pay;
GstStructure *str;
const GValue *value;
- guint8 *data;
- guint size;
+ guint8 *data, *bdata;
+ gsize size, bsize;
+ GstBuffer *buffer;
rtph264pay = GST_RTP_H264_PAY (basepayload);
/* packetized AVC video has a codec_data */
if ((value = gst_structure_get_value (str, "codec_data"))) {
- GstBuffer *buffer;
guint num_sps, num_pps;
gint i, nal_size;
rtph264pay->packetized = TRUE;
buffer = gst_value_get_buffer (value);
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READ);
+ data = bdata;
+ size = bsize;
/* parse the avcC data */
if (size < 7)
/* make a buffer out of it and add to SPS list */
sps_buf = gst_buffer_new_and_alloc (nal_size);
- memcpy (GST_BUFFER_DATA (sps_buf), data, nal_size);
+ gst_buffer_fill (sps_buf, 0, data, nal_size);
rtph264pay->sps = g_list_append (rtph264pay->sps, sps_buf);
data += nal_size;
/* make a buffer out of it and add to PPS list */
pps_buf = gst_buffer_new_and_alloc (nal_size);
- memcpy (GST_BUFFER_DATA (pps_buf), data, nal_size);
+ gst_buffer_fill (pps_buf, 0, data, nal_size);
rtph264pay->pps = g_list_append (rtph264pay->pps, pps_buf);
data += nal_size;
}
/* and update the caps with the collected data */
if (!gst_rtp_h264_pay_set_sps_pps (basepayload))
- return FALSE;
+ goto set_sps_pps_failed;
} else {
GST_DEBUG_OBJECT (rtph264pay, "have bytestream h264");
rtph264pay->packetized = FALSE;
avcc_too_small:
{
GST_ERROR_OBJECT (rtph264pay, "avcC size %u < 7", size);
- return FALSE;
+ goto error;
}
wrong_version:
{
GST_ERROR_OBJECT (rtph264pay, "wrong avcC version");
- return FALSE;
+ goto error;
}
avcc_error:
{
GST_ERROR_OBJECT (rtph264pay, "avcC too small ");
+ goto error;
+ }
+set_sps_pps_failed:
+ {
+ GST_ERROR_OBJECT (rtph264pay, "failed to set sps/pps");
+ goto error;
+ }
+error:
+ {
+ gst_buffer_unmap (buffer, bdata, bsize);
return FALSE;
}
}
guint8 *nalp;
guint save = 0;
gint state = 0;
+ guint8 nal_type;
nal_len = strlen (params[i]);
buf = gst_buffer_new_and_alloc (nal_len);
- nalp = GST_BUFFER_DATA (buf);
+ nalp = gst_buffer_map (buf, NULL, NULL, GST_MAP_WRITE);
nal_len = g_base64_decode_step (params[i], nal_len, nalp, &state, &save);
- GST_BUFFER_SIZE (buf) = nal_len;
+ nal_type = nalp[0];
+ gst_buffer_unmap (buf, nalp, nal_len);
if (!nal_len) {
gst_buffer_unref (buf);
}
/* append to the right list */
- if ((nalp[0] & 0x1f) == 7) {
+ if ((nal_type & 0x1f) == 7) {
GST_DEBUG_OBJECT (rtph264pay, "adding param %d as SPS %d", i, num_sps);
rtph264pay->sps = g_list_append (rtph264pay->sps, buf);
num_sps++;
if (payloader->sps != NULL) {
sps_buf = GST_BUFFER_CAST (payloader->sps->data);
- if ((GST_BUFFER_SIZE (sps_buf) != sps_len)
- || memcmp (GST_BUFFER_DATA (sps_buf), sps, sps_len)) {
+ if (gst_buffer_memcmp (sps_buf, 0, sps, sps_len)) {
/* something changed, update */
payloader->profile = (sps[1] << 16) + (sps[2] << 8) + sps[3];
GST_DEBUG ("Profile level IDC = %06x", payloader->profile);
if (updated) {
sps_buf = gst_buffer_new_and_alloc (sps_len);
- memcpy (GST_BUFFER_DATA (sps_buf), sps, sps_len);
+ gst_buffer_fill (sps_buf, 0, sps, sps_len);
if (payloader->sps) {
/* replace old buffer */
if (payloader->pps != NULL) {
pps_buf = GST_BUFFER_CAST (payloader->pps->data);
- if ((GST_BUFFER_SIZE (pps_buf) != pps_len)
- || memcmp (GST_BUFFER_DATA (pps_buf), pps, pps_len)) {
+ if (gst_buffer_memcmp (pps_buf, 0, pps, pps_len)) {
/* something changed, update */
updated = TRUE;
}
if (updated) {
pps_buf = gst_buffer_new_and_alloc (pps_len);
- memcpy (GST_BUFFER_DATA (pps_buf), pps, pps_len);
+ gst_buffer_fill (pps_buf, 0, pps, pps_len);
if (payloader->pps) {
/* replace old buffer */
{
GstFlowReturn ret = GST_FLOW_OK;
GList *walk;
+ guint8 *data;
+ gsize size;
for (walk = rtph264pay->sps; walk; walk = g_list_next (walk)) {
GstBuffer *sps_buf = GST_BUFFER_CAST (walk->data);
GST_DEBUG_OBJECT (rtph264pay, "inserting SPS in the stream");
/* resend SPS */
+ data = gst_buffer_map (sps_buf, &size, NULL, GST_MAP_READ);
ret = gst_rtp_h264_pay_payload_nal (basepayload,
- GST_BUFFER_DATA (sps_buf), GST_BUFFER_SIZE (sps_buf), timestamp,
- sps_buf);
+ data, size, timestamp, sps_buf);
+ gst_buffer_unmap (sps_buf, data, size);
/* Not critical here; but throw a warning */
if (ret != GST_FLOW_OK)
GST_WARNING ("Problem pushing SPS");
GST_DEBUG_OBJECT (rtph264pay, "inserting PPS in the stream");
/* resend PPS */
+ data = gst_buffer_map (pps_buf, &size, NULL, GST_MAP_READ);
ret = gst_rtp_h264_pay_payload_nal (basepayload,
- GST_BUFFER_DATA (pps_buf), GST_BUFFER_SIZE (pps_buf), timestamp,
- pps_buf);
+ data, size, timestamp, pps_buf);
+ gst_buffer_unmap (pps_buf, data, size);
/* Not critical here; but throw a warning */
if (ret != GST_FLOW_OK)
GST_WARNING ("Problem pushing PPS");
guint packet_len, payload_len, mtu;
GstBuffer *outbuf;
guint8 *payload;
+#if 0
GstBufferList *list = NULL;
- GstBufferListIterator *it = NULL;
+#endif
gboolean send_spspps;
+ GstRTPBuffer rtp = { NULL };
rtph264pay = GST_RTP_H264_PAY (basepayload);
mtu = GST_BASE_RTP_PAYLOAD_MTU (rtph264pay);
"NAL Unit fit in one packet datasize=%d mtu=%d", size, mtu);
/* will fit in one packet */
+#if 0
if (rtph264pay->buffer_list) {
/* use buffer lists
* first create buffer without payload containing only the RTP header
* and then another buffer containing the payload. both buffers will
* be then added to the list */
outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
- } else {
+ } else
+#endif
+ {
/* use the old-fashioned way with a single buffer and memcpy */
outbuf = gst_rtp_buffer_new_allocate (size, 0, 0);
}
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* only set the marker bit on packets containing access units */
if (IS_ACCESS_UNIT (nalType)) {
- gst_rtp_buffer_set_marker (outbuf, 1);
+ gst_rtp_buffer_set_marker (&rtp, 1);
}
/* timestamp the outbuffer */
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+#if 0
if (rtph264pay->buffer_list) {
GstBuffer *paybuf;
GST_BUFFER_DATA (buffer_orig), size);
else {
paybuf = gst_buffer_new_and_alloc (size);
- memcpy (GST_BUFFER_DATA (paybuf), data, size);
+ gst_buffer_fill (paybuf, 0, data, size);
}
list = gst_buffer_list_new ();
- it = gst_buffer_list_iterate (list);
/* add both buffers to the buffer list */
- gst_buffer_list_iterator_add_group (it);
- gst_buffer_list_iterator_add (it, outbuf);
- gst_buffer_list_iterator_add (it, paybuf);
-
- gst_buffer_list_iterator_free (it);
+ gst_buffer_list_add (list, outbuf);
+ gst_buffer_list_add (list, paybuf);
/* push the list to the next element in the pipe */
ret = gst_basertppayload_push_list (basepayload, list);
- } else {
- payload = gst_rtp_buffer_get_payload (outbuf);
+ } else
+#endif
+ {
+ payload = gst_rtp_buffer_get_payload (&rtp);
GST_DEBUG_OBJECT (basepayload, "Copying %d bytes to outbuf", size);
memcpy (payload, data, size);
+ gst_rtp_buffer_unmap (&rtp);
ret = gst_basertppayload_push (basepayload, outbuf);
}
/* We keep 2 bytes for FU indicator and FU Header */
payload_len = gst_rtp_buffer_calc_payload_len (mtu - 2, 0, 0);
+#if 0
if (rtph264pay->buffer_list) {
list = gst_buffer_list_new ();
it = gst_buffer_list_iterate (list);
}
+#endif
while (end == 0) {
limitedSize = size < payload_len ? size : payload_len;
"Inside FU-A fragmentation limitedSize=%d iteration=%d", limitedSize,
ii);
+#if 0
if (rtph264pay->buffer_list) {
/* use buffer lists
* first create buffer without payload containing only the RTP header
* and then another buffer containing the payload. both buffers will
* be then added to the list */
outbuf = gst_rtp_buffer_new_allocate (2, 0, 0);
- } else {
+ } else
+#endif
+ {
/* use the old-fashioned way with a single buffer and memcpy
* first create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (limitedSize + 2, 0, 0);
}
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
if (limitedSize == size) {
GST_DEBUG_OBJECT (basepayload, "end size=%d iteration=%d", size, ii);
end = 1;
}
if (IS_ACCESS_UNIT (nalType)) {
- gst_rtp_buffer_set_marker (outbuf, end);
+ gst_rtp_buffer_set_marker (&rtp, end);
}
/* FU indicator */
/* FU Header */
payload[1] = (start << 7) | (end << 6) | (nalHeader & 0x1f);
+#if 0
if (rtph264pay->buffer_list) {
GstBuffer *paybuf;
gst_buffer_list_iterator_add (it, outbuf);
gst_buffer_list_iterator_add (it, paybuf);
- } else {
+ } else
+#endif
+ {
memcpy (&payload[2], data + pos, limitedSize);
+ gst_rtp_buffer_unmap (&rtp);
GST_DEBUG_OBJECT (basepayload,
"recorded %d payload bytes into packet iteration=%d",
limitedSize + 2, ii);
start = 0;
}
+#if 0
if (rtph264pay->buffer_list) {
/* free iterator and push the whole buffer list at once */
gst_buffer_list_iterator_free (it);
ret = gst_basertppayload_push_list (basepayload, list);
}
+#endif
}
return ret;
}
{
GstRtpH264Pay *rtph264pay;
GstFlowReturn ret;
- guint size, nal_len, i;
+ gsize size;
+ guint nal_len, i;
+ guint8 *bdata = NULL;
+ gsize bsize;
const guint8 *data, *nal_data;
GstClockTime timestamp;
GArray *nal_queue;
guint pushed = 0;
+ gboolean bytestream;
rtph264pay = GST_RTP_H264_PAY (basepayload);
/* the input buffer contains one or more NAL units */
- if (rtph264pay->scan_mode == GST_H264_SCAN_MODE_BYTESTREAM) {
+ bytestream = (rtph264pay->scan_mode == GST_H264_SCAN_MODE_BYTESTREAM);
+
+ if (bytestream) {
timestamp = gst_adapter_prev_timestamp (rtph264pay->adapter, NULL);
gst_adapter_push (rtph264pay->adapter, buffer);
size = gst_adapter_available (rtph264pay->adapter);
- data = gst_adapter_peek (rtph264pay->adapter, size);
+ data = gst_adapter_map (rtph264pay->adapter, size);
GST_DEBUG_OBJECT (basepayload, "got %d bytes (%d)", size,
- GST_BUFFER_SIZE (buffer));
+ gst_buffer_get_size (buffer));
if (!GST_CLOCK_TIME_IS_VALID (timestamp))
timestamp = GST_BUFFER_TIMESTAMP (buffer);
} else {
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READ);
+ data = bdata;
+ size = bsize;
timestamp = GST_BUFFER_TIMESTAMP (buffer);
GST_DEBUG_OBJECT (basepayload, "got %d bytes", size);
}
g_array_set_size (nal_queue, 0);
}
- if (rtph264pay->scan_mode == GST_H264_SCAN_MODE_BYTESTREAM)
- gst_adapter_flush (rtph264pay->adapter, pushed);
- else
+done:
+ if (bytestream) {
+ gst_adapter_unmap (rtph264pay->adapter, pushed);
+ } else {
+ gst_buffer_unmap (buffer, bdata, bsize);
gst_buffer_unref (buffer);
+ }
return ret;
{
GST_WARNING_OBJECT (basepayload, "Could not set outcaps");
g_array_set_size (nal_queue, 0);
- gst_buffer_unref (buffer);
- return GST_FLOW_NOT_NEGOTIATED;
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto done;
}
}
static gboolean
-gst_rtp_h264_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_h264_pay_handle_event (GstBaseRTPPayload * payload, GstEvent * event)
{
+ gboolean res;
const GstStructure *s;
- GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (GST_PAD_PARENT (pad));
+ GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
break;
}
- return FALSE;
+ res =
+ GST_BASE_RTP_PAYLOAD_CLASS (parent_class)->handle_event (payload, event);
+
+ return res;
}
static GstStateChangeReturn
static gboolean gst_rtp_ilbc_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPiLBCDepay, gst_rtp_ilbc_depay, GstBaseRTPDepayload,
+#define gst_rtp_ilbc_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPiLBCDepay, gst_rtp_ilbc_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
#define GST_TYPE_ILBC_MODE (gst_ilbc_mode_get_type())
}
static void
-gst_rtp_ilbc_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_ilbc_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_ilbc_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP iLBC depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts iLBC audio from RTP packets (RFC 3952)",
- "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
-}
-
-static void
gst_rtp_ilbc_depay_class_init (GstRTPiLBCDepayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gobject_class->set_property = gst_ilbc_depay_set_property;
GST_TYPE_ILBC_MODE, DEFAULT_MODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ilbc_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ilbc_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP iLBC depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts iLBC audio from RTP packets (RFC 3952)",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
+
gstbasertpdepayload_class->process = gst_rtp_ilbc_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_ilbc_depay_setcaps;
}
static void
-gst_rtp_ilbc_depay_init (GstRTPiLBCDepay * rtpilbcdepay,
- GstRTPiLBCDepayClass * klass)
+gst_rtp_ilbc_depay_init (GstRTPiLBCDepay * rtpilbcdepay)
{
/* Set default mode */
rtpilbcdepay->mode = DEFAULT_MODE;
{
GstBuffer *outbuf;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
- marker = gst_rtp_buffer_get_marker (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark start of talkspurt with DISCONT */
static GstCaps *gst_rtp_ilbc_pay_sink_getcaps (GstBaseRTPPayload * payload,
- GstPad * pad);
+ GstPad * pad, GstCaps * filter);
static gboolean gst_rtp_ilbc_pay_sink_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPILBCPay, gst_rtp_ilbc_pay, GstBaseRTPAudioPayload,
+#define gst_rtp_ilbc_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPILBCPay, gst_rtp_ilbc_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_ilbc_pay_base_init (gpointer klass)
+gst_rtp_ilbc_pay_class_init (GstRTPILBCPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpilbcpay_debug, "rtpilbcpay", 0,
+ "iLBC audio RTP payloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_ilbc_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_ilbc_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP iLBC Payloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP iLBC Payloader",
"Codec/Payloader/Network/RTP",
"Packetize iLBC audio streams into RTP packets",
"Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
-}
-
-static void
-gst_rtp_ilbc_pay_class_init (GstRTPILBCPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_ilbc_pay_sink_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_ilbc_pay_sink_getcaps;
-
- GST_DEBUG_CATEGORY_INIT (rtpilbcpay_debug, "rtpilbcpay", 0,
- "iLBC audio RTP payloader");
}
static void
-gst_rtp_ilbc_pay_init (GstRTPILBCPay * rtpilbcpay, GstRTPILBCPayClass * klass)
+gst_rtp_ilbc_pay_init (GstRTPILBCPay * rtpilbcpay)
{
GstBaseRTPPayload *basertppayload;
GstBaseRTPAudioPayload *basertpaudiopayload;
/* we return the padtemplate caps with the mode field fixated to a value if we
* can */
static GstCaps *
-gst_rtp_ilbc_pay_sink_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
+gst_rtp_ilbc_pay_sink_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
PROP_LAST
};
-GST_BOILERPLATE (GstRtpJ2KDepay, gst_rtp_j2k_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_j2k_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJ2KDepay, gst_rtp_j2k_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_j2k_depay_finalize (GObject * object);
GstBuffer * buf);
static void
-gst_rtp_j2k_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_j2k_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_j2k_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP JPEG 2000 depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts JPEG 2000 video from RTP packets (RFC 5371)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_j2k_depay_class_init (GstRtpJ2KDepayClass * klass)
{
GObjectClass *gobject_class;
"Use Buffer Lists",
DEFAULT_BUFFER_LIST, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_j2k_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_j2k_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP JPEG 2000 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts JPEG 2000 video from RTP packets (RFC 5371)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstelement_class->change_state = gst_rtp_j2k_depay_change_state;
gstbasertpdepayload_class->set_caps = gst_rtp_j2k_depay_setcaps;
}
static void
-gst_rtp_j2k_depay_init (GstRtpJ2KDepay * rtpj2kdepay,
- GstRtpJ2KDepayClass * klass)
+gst_rtp_j2k_depay_init (GstRtpJ2KDepay * rtpj2kdepay)
{
rtpj2kdepay->buffer_list = DEFAULT_BUFFER_LIST;
for (walk = packets; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
GST_DEBUG_OBJECT (rtpj2kdepay, "append pu packet of size %u",
- GST_BUFFER_SIZE (buf));
+ gst_buffer_get_size (buf));
gst_adapter_push (rtpj2kdepay->t_adapter, buf);
}
g_list_free (packets);
GList *packets, *walk;
guint8 end[2];
GstFlowReturn ret = GST_FLOW_OK;
+ guint8 *data;
+ gsize size;
+ GstBuffer *buf;
rtpj2kdepay = GST_RTP_J2K_DEPAY (depayload);
/* now append the tile packets to the frame */
packets = gst_adapter_take_list (rtpj2kdepay->t_adapter, avail);
for (walk = packets; walk; walk = g_list_next (walk)) {
- GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+ buf = GST_BUFFER_CAST (walk->data);
if (walk == packets) {
- guint8 *data;
- guint size;
-
/* first buffer should contain the SOT */
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
if (size < 12)
goto invalid_tile;
if (Psot != nPsot && Psot != 0) {
/* Psot must match the size of the tile */
GST_DEBUG_OBJECT (rtpj2kdepay, "set Psot from %u to %u", Psot, nPsot);
+ gst_buffer_unmap (buf, data, size);
+
buf = gst_buffer_make_writable (buf);
- data = GST_BUFFER_DATA (buf);
+
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_WRITE);
GST_WRITE_UINT32_BE (&data[6], nPsot);
}
}
+ gst_buffer_unmap (buf, data, size);
}
GST_DEBUG_OBJECT (rtpj2kdepay, "append pu packet of size %u",
- GST_BUFFER_SIZE (buf));
+ gst_buffer_get_size (buf));
gst_adapter_push (rtpj2kdepay->f_adapter, buf);
}
g_list_free (packets);
invalid_tile:
{
GST_ELEMENT_WARNING (rtpj2kdepay, STREAM, DECODE, ("Invalid tile"), (NULL));
+ gst_buffer_unmap (buf, data, size);
gst_adapter_clear (rtpj2kdepay->t_adapter);
rtpj2kdepay->last_tile = -1;
return ret;
{
GstRtpJ2KDepay *rtpj2kdepay;
guint8 end[2];
- guint8 *data;
guint avail;
GstFlowReturn ret = GST_FLOW_OK;
gst_adapter_copy (rtpj2kdepay->f_adapter, end, avail - 2, 2);
if (end[0] != 0xff && end[1] != 0xd9) {
+ end[0] = 0xff;
+ end[1] = 0xd9;
+
GST_DEBUG_OBJECT (rtpj2kdepay, "no EOC marker, adding one");
/* no EOI marker, add one */
outbuf = gst_buffer_new_and_alloc (2);
- data = GST_BUFFER_DATA (outbuf);
- data[0] = 0xff;
- data[1] = 0xd9;
+ gst_buffer_fill (outbuf, 0, end, 2);
gst_adapter_push (rtpj2kdepay->f_adapter, outbuf);
avail += 2;
}
-
+#if 0
if (rtpj2kdepay->buffer_list) {
GList *list;
GstBufferList *buflist;
gst_buffer_list_iterator_free (it);
ret = gst_base_rtp_depayload_push_list (depayload, buflist);
- } else {
+ } else
+#endif
+ {
GST_DEBUG_OBJECT (rtpj2kdepay, "pushing buffer of %u bytes", avail);
outbuf = gst_adapter_take_buffer (rtpj2kdepay->f_adapter, avail);
ret = gst_base_rtp_depayload_push (depayload, outbuf);
guint MHF, mh_id, frag_offset, tile, payload_len, j2klen;
gint gap;
guint32 rtptime;
+ GstRTPBuffer rtp = { NULL };
rtpj2kdepay = GST_RTP_J2K_DEPAY (depayload);
- payload = gst_rtp_buffer_get_payload (buf);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* we need at least a header */
if (payload_len < 8)
goto empty_packet;
- rtptime = gst_rtp_buffer_get_timestamp (buf);
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
/* new timestamp marks new frame */
if (rtpj2kdepay->last_rtptime != rtptime) {
}
/* and push in pu adapter */
GST_DEBUG_OBJECT (rtpj2kdepay, "push pu of size %u in adapter", j2klen);
- pu_frag = gst_rtp_buffer_get_payload_subbuffer (buf, 8, -1);
+ pu_frag = gst_rtp_buffer_get_payload_subbuffer (&rtp, 8, -1);
gst_adapter_push (rtpj2kdepay->pu_adapter, pu_frag);
if (MHF & 2) {
}
/* marker bit finishes the frame */
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
GST_DEBUG_OBJECT (rtpj2kdepay, "marker set, last buffer");
/* then flush frame */
gst_rtp_j2k_depay_flush_frame (depayload);
}
+ gst_rtp_buffer_unmap (&rtp);
+
return NULL;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpj2kdepay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
wrong_mh_id:
("Invalid mh_id %u, expected %u", mh_id, rtpj2kdepay->last_mh_id),
(NULL));
gst_rtp_j2k_depay_clear_pu (rtpj2kdepay);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstFlowReturn gst_rtp_j2k_pay_handle_buffer (GstBaseRTPPayload * pad,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpJ2KPay, gst_rtp_j2k_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_j2k_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_j2k_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_j2k_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP JPEG 2000 payloader", "Codec/Payloader/Network/RTP",
- "Payload-encodes JPEG 2000 pictures into RTP packets (RFC 5371)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_j2k_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJ2KPay, gst_rtp_j2k_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_j2k_pay_class_init (GstRtpJ2KPayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->set_property = gst_rtp_j2k_pay_set_property;
"Use Buffer Lists",
DEFAULT_BUFFER_LIST, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_j2k_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_j2k_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP JPEG 2000 payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes JPEG 2000 pictures into RTP packets (RFC 5371)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_j2k_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_j2k_pay_handle_buffer;
}
static void
-gst_rtp_j2k_pay_init (GstRtpJ2KPay * pay, GstRtpJ2KPayClass * klass)
+gst_rtp_j2k_pay_init (GstRtpJ2KPay * pay)
{
pay->buffer_list = DEFAULT_BUFFER_LIST;
}
GstClockTime timestamp;
GstFlowReturn ret = GST_FLOW_ERROR;
RtpJ2KState state;
+#if 0
GstBufferList *list = NULL;
- GstBufferListIterator *it = NULL;
+#endif
guint8 *data;
- guint size;
+ gsize size;
guint mtu, max_size;
guint offset;
guint end, pos;
pay = GST_RTP_J2K_PAY (basepayload);
mtu = GST_BASE_RTP_PAYLOAD_MTU (pay);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
offset = pos = end = 0;
state.next_sot = 0;
state.force_packet = FALSE;
+#if 0
if (pay->buffer_list) {
list = gst_buffer_list_new ();
it = gst_buffer_list_iterate (list);
}
+#endif
/* get max packet length */
max_size = gst_rtp_buffer_calc_payload_len (mtu - HEADER_SIZE, 0, 0);
guint8 *header;
guint payload_size;
guint pu_size;
+ GstRTPBuffer rtp = { NULL };
/* try to pack as much as we can */
do {
payload_size = gst_rtp_buffer_calc_payload_len (packet_size, 0, 0);
data_size = payload_size - HEADER_SIZE;
+#if 0
if (pay->buffer_list) {
/* make buffer for header */
outbuf = gst_rtp_buffer_new_allocate (HEADER_SIZE, 0, 0);
- } else {
+ } else
+#endif
+ {
/* make buffer for header and data */
outbuf = gst_rtp_buffer_new_allocate (payload_size, 0, 0);
}
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* get pointer to header */
- header = gst_rtp_buffer_get_payload (outbuf);
+ header = gst_rtp_buffer_get_payload (&rtp);
pu_size -= data_size;
if (pu_size == 0) {
state.header.MHF = 2;
}
if (end >= size)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
}
/*
header[6] = (state.header.offset >> 8) & 0xff;
header[7] = state.header.offset & 0xff;
+#if 0
if (pay->buffer_list) {
GstBuffer *paybuf;
/* add both buffers to the buffer list */
gst_buffer_list_iterator_add (it, outbuf);
gst_buffer_list_iterator_add (it, paybuf);
- } else {
+ } else
+#endif
+ {
/* copy payload */
memcpy (header + HEADER_SIZE, &data[offset], data_size);
+ gst_rtp_buffer_unmap (&rtp);
ret = gst_basertppayload_push (basepayload, outbuf);
if (ret != GST_FLOW_OK)
done:
gst_buffer_unref (buffer);
+#if 0
if (pay->buffer_list) {
/* free iterator and push the whole buffer list at once */
gst_buffer_list_iterator_free (it);
ret = gst_basertppayload_push_list (basepayload, list);
}
+#endif
return ret;
}
)
);
-GST_BOILERPLATE (GstRtpJPEGDepay, gst_rtp_jpeg_depay, GstBaseRTPDepayload,
+#define gst_rtp_jpeg_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJPEGDepay, gst_rtp_jpeg_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_jpeg_depay_finalize (GObject * object);
GstBuffer * buf);
static void
-gst_rtp_jpeg_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_jpeg_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_jpeg_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP JPEG depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts JPEG video from RTP packets (RFC 2435)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_jpeg_depay_class_init (GstRtpJPEGDepayClass * klass)
{
GObjectClass *gobject_class;
gobject_class->finalize = gst_rtp_jpeg_depay_finalize;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jpeg_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jpeg_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP JPEG depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts JPEG video from RTP packets (RFC 2435)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstelement_class->change_state = gst_rtp_jpeg_depay_change_state;
gstbasertpdepayload_class->set_caps = gst_rtp_jpeg_depay_setcaps;
}
static void
-gst_rtp_jpeg_depay_init (GstRtpJPEGDepay * rtpjpegdepay,
- GstRtpJPEGDepayClass * klass)
+gst_rtp_jpeg_depay_init (GstRtpJPEGDepay * rtpjpegdepay)
{
rtpjpegdepay->adapter = gst_adapter_new ();
}
"clock-rate = (int) 90000")
);
-GST_BOILERPLATE (GstRtpMPADepay, gst_rtp_mpa_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_mpa_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMPADepay, gst_rtp_mpa_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_mpa_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
GstBuffer * buf);
static void
-gst_rtp_mpa_depay_base_init (gpointer klass)
+gst_rtp_mpa_depay_class_init (GstRtpMPADepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpmpadepay_debug, "rtpmpadepay", 0,
+ "MPEG Audio RTP Depayloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mpa_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mpa_depay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG audio from RTP packets (RFC 2038)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mpa_depay_class_init (GstRtpMPADepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->set_caps = gst_rtp_mpa_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_mpa_depay_process;
-
- GST_DEBUG_CATEGORY_INIT (rtpmpadepay_debug, "rtpmpadepay", 0,
- "MPEG Audio RTP Depayloader");
}
static void
-gst_rtp_mpa_depay_init (GstRtpMPADepay * rtpmpadepay,
- GstRtpMPADepayClass * klass)
+gst_rtp_mpa_depay_init (GstRtpMPADepay * rtpmpadepay)
{
/* needed because of GST_BOILERPLATE */
}
{
GstRtpMPADepay *rtpmpadepay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
+ gint payload_len;
+#if 0
+ guint8 *payload;
+ guint16 frag_offset;
+#endif
+ gboolean marker;
rtpmpadepay = GST_RTP_MPA_DEPAY (depayload);
- {
- gint payload_len;
- gboolean marker;
-
- payload_len = gst_rtp_buffer_get_payload_len (buf);
-
- if (payload_len <= 4)
- goto empty_packet;
-
- /* strip off header
- *
- * 0 1 2 3
- * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- * | MBZ | Frag_offset |
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- /* frag_offset = (payload[2] << 8) | payload[3]; */
-
- /* subbuffer skipping the 4 header bytes */
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 4, -1);
- marker = gst_rtp_buffer_get_marker (buf);
-
- if (marker) {
- /* mark start of talkspurt with discont */
- GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- }
- GST_DEBUG_OBJECT (rtpmpadepay,
- "gst_rtp_mpa_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
-
- /* FIXME, we can push half mpeg frames when they are split over multiple
- * RTP packets */
- return outbuf;
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+
+ if (payload_len <= 4)
+ goto empty_packet;
+
+#if 0
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ /* strip off header
+ *
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | MBZ | Frag_offset |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ frag_offset = (payload[2] << 8) | payload[3];
+#endif
+
+ /* subbuffer skipping the 4 header bytes */
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 4, -1);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+
+ if (marker) {
+ /* mark start of talkspurt with discont */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
+ GST_DEBUG_OBJECT (rtpmpadepay,
+ "gst_rtp_mpa_depay_chain: pushing buffer of size %d",
+ gst_buffer_get_size (outbuf));
+
+ gst_rtp_buffer_unmap (&rtp);
- return NULL;
+ /* FIXME, we can push half mpeg frames when they are split over multiple
+ * RTP packets */
+ return outbuf;
/* ERRORS */
empty_packet:
{
GST_ELEMENT_WARNING (rtpmpadepay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static gboolean gst_rtp_mpa_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-static gboolean gst_rtp_mpa_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_mpa_pay_handle_event (GstBaseRTPPayload * payload,
+ GstEvent * event);
static GstFlowReturn gst_rtp_mpa_pay_flush (GstRtpMPAPay * rtpmpapay);
static GstFlowReturn gst_rtp_mpa_pay_handle_buffer (GstBaseRTPPayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpMPAPay, gst_rtp_mpa_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_mpa_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpa_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpa_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG audio payloader", "Codec/Payloader/Network/RTP",
- "Payload MPEG audio as RTP packets (RFC 2038)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_mpa_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMPAPay, gst_rtp_mpa_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_mpa_pay_class_init (GstRtpMPAPayClass * klass)
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpmpapay_debug, "rtpmpapay", 0,
+ "MPEG Audio RTP Depayloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstelement_class->change_state = gst_rtp_mpa_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload MPEG audio as RTP packets (RFC 2038)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_mpa_pay_setcaps;
gstbasertppayload_class->handle_event = gst_rtp_mpa_pay_handle_event;
gstbasertppayload_class->handle_buffer = gst_rtp_mpa_pay_handle_buffer;
-
- GST_DEBUG_CATEGORY_INIT (rtpmpapay_debug, "rtpmpapay", 0,
- "MPEG Audio RTP Depayloader");
}
static void
-gst_rtp_mpa_pay_init (GstRtpMPAPay * rtpmpapay, GstRtpMPAPayClass * klass)
+gst_rtp_mpa_pay_init (GstRtpMPAPay * rtpmpapay)
{
rtpmpapay->adapter = gst_adapter_new ();
}
}
static gboolean
-gst_rtp_mpa_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_mpa_pay_handle_event (GstBaseRTPPayload * payload, GstEvent * event)
{
+ gboolean ret;
GstRtpMPAPay *rtpmpapay;
- rtpmpapay = GST_RTP_MPA_PAY (gst_pad_get_parent (pad));
+ rtpmpapay = GST_RTP_MPA_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
break;
}
- gst_object_unref (rtpmpapay);
+ ret =
+ GST_BASE_RTP_PAYLOAD_CLASS (parent_class)->handle_event (payload, event);
- /* FALSE to let the parent handle the event as well */
- return FALSE;
+ return ret;
}
static GstFlowReturn
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL };
/* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
payload_len -= 4;
- gst_rtp_buffer_set_payload_type (outbuf, GST_RTP_PAYLOAD_MPA);
+ gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_PAYLOAD_MPA);
/*
* 0 1 2 3
* | MBZ | Frag_offset |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload[0] = 0;
payload[1] = 0;
payload[2] = frag_offset >> 8;
frag_offset += payload_len;
if (avail == 0)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmpapay->first_ts;
GST_BUFFER_DURATION (outbuf) = rtpmpapay->duration;
rtpmpapay = GST_RTP_MPA_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
GstBuffer *buffer;
} GstADUFrame;
-GST_BOILERPLATE (GstRtpMPARobustDepay, gst_rtp_mpa_robust_depay,
- GstBaseRTPDepayload, GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_mpa_robust_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMPARobustDepay, gst_rtp_mpa_robust_depay,
+ GST_TYPE_BASE_RTP_DEPAYLOAD);
static GstStateChangeReturn gst_rtp_mpa_robust_change_state (GstElement *
element, GstStateChange transition);
depayload, GstBuffer * buf);
static void
-gst_rtp_mpa_robust_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpa_robust_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpa_robust_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG audio from RTP packets (RFC 5219)",
- "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
-}
-
-static void
gst_rtp_mpa_robust_depay_finalize (GObject * object)
{
GstRtpMPARobustDepay *rtpmpadepay;
G_OBJECT_CLASS (parent_class)->finalize (object);
}
-
static void
gst_rtp_mpa_robust_depay_class_init (GstRtpMPARobustDepayClass * klass)
{
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpmparobustdepay_debug, "rtpmparobustdepay", 0,
+ "Robust MPEG Audio RTP Depayloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_rtp_mpa_robust_change_state);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_robust_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_robust_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG audio from RTP packets (RFC 5219)",
+ "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
+
gstbasertpdepayload_class->set_caps = gst_rtp_mpa_robust_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_mpa_robust_depay_process;
-
- GST_DEBUG_CATEGORY_INIT (rtpmparobustdepay_debug, "rtpmparobustdepay", 0,
- "Robust MPEG Audio RTP Depayloader");
}
static void
-gst_rtp_mpa_robust_depay_init (GstRtpMPARobustDepay * rtpmpadepay,
- GstRtpMPARobustDepayClass * klass)
+gst_rtp_mpa_robust_depay_init (GstRtpMPARobustDepay * rtpmpadepay)
{
rtpmpadepay->adapter = gst_adapter_new ();
rtpmpadepay->adu_frames = g_queue_new ();
rtpmpadepay, GstADUFrame * frame)
{
GstADUFrame *dummy;
+ guint8 *data;
+ gsize size;
dummy = g_slice_dup (GstADUFrame, frame);
dummy->backpointer = 0;
dummy->buffer = gst_buffer_new_and_alloc (dummy->side_info + 4);
- memset (GST_BUFFER_DATA (dummy->buffer), 0, dummy->side_info + 4);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (dummy->buffer), dummy->header);
+
+ data = gst_buffer_map (dummy->buffer, &size, NULL, GST_MAP_WRITE);
+ memset (data, 0, size);
+ GST_WRITE_UINT32_BE (data, dummy->header);
+ gst_buffer_unmap (dummy->buffer, data, size);
+
GST_BUFFER_TIMESTAMP (dummy->buffer) = GST_BUFFER_TIMESTAMP (frame->buffer);
return dummy;
GstADUFrame *frame = NULL;
guint version, layer, channels, size;
guint crc;
+ guint8 *bdata;
+ gsize bsize;
g_return_val_if_fail (buf != NULL, FALSE);
- if (GST_BUFFER_SIZE (buf) < 6) {
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+
+ if (bsize < 6)
goto corrupt_frame;
- }
frame = g_slice_new0 (GstADUFrame);
- frame->header = GST_READ_UINT32_BE (GST_BUFFER_DATA (buf));
+ frame->header = GST_READ_UINT32_BE (bdata);
size = mp3_type_frame_length_from_header (GST_ELEMENT_CAST (rtpmpadepay),
frame->header, &version, &layer, &channels, NULL, NULL, NULL, &crc);
/* backpointer */
if (layer == 3) {
- frame->backpointer = GST_READ_UINT16_BE (GST_BUFFER_DATA (buf) + 4);
+ frame->backpointer = GST_READ_UINT16_BE (bdata + 4);
frame->backpointer >>= 7;
GST_LOG_OBJECT (rtpmpadepay, "backpointer: %d", frame->backpointer);
}
frame->data_size = frame->size - 4 - frame->side_info;
/* some size validation checks */
- if (4 + frame->side_info > GST_BUFFER_SIZE (buf))
+ if (4 + frame->side_info > bsize)
goto corrupt_frame;
/* ADU data would then extend past MP3 frame,
* even using past byte reservoir */
- if (-frame->backpointer + (gint) (GST_BUFFER_SIZE (buf)) > frame->size)
+ if (-frame->backpointer + (gint) (bsize) > frame->size)
goto corrupt_frame;
+ gst_buffer_unmap (buf, bdata, bsize);
+
/* ok, take buffer and queue */
frame->buffer = buf;
g_queue_push_tail (rtpmpadepay->adu_frames, frame);
corrupt_frame:
{
GST_DEBUG_OBJECT (rtpmpadepay, "frame is corrupt");
+ gst_buffer_unmap (buf, bdata, bsize);
gst_buffer_unref (buf);
if (frame)
g_slice_free (GstADUFrame, frame);
{
gboolean ret = FALSE;
guint8 *data;
+ gsize size;
guint val, iindex, icc;
- data = GST_BUFFER_DATA (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
val = GST_READ_UINT16_BE (data) >> 5;
+ gst_buffer_unmap (buf, data, size);
+
iindex = val >> 3;
icc = val & 0x7;
GstFlowReturn ret = GST_FLOW_OK;
while (1) {
+ guint8 *data;
+ gsize size;
if (G_UNLIKELY (!rtpmpadepay->cur_adu_frame)) {
rtpmpadepay->cur_adu_frame = rtpmpadepay->adu_frames->head;
continue;
}
- if (rtpmpadepay->offset == GST_BUFFER_SIZE (frame->buffer)) {
+ if (rtpmpadepay->offset == gst_buffer_get_size (frame->buffer)) {
if (g_list_next (rtpmpadepay->cur_adu_frame)) {
GST_LOG_OBJECT (rtpmpadepay,
"moving to next ADU frame, size %d, side_info %d",
gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, 0);
/* bytewriter corresponds to head frame,
* i.e. the header and the side info must match */
+ data = gst_buffer_map (head->buffer, &size, NULL, GST_MAP_READ);
gst_byte_writer_put_data (rtpmpadepay->mp3_frame,
- GST_BUFFER_DATA (head->buffer), 4 + head->side_info);
+ data, 4 + head->side_info);
+ gst_buffer_unmap (head->buffer, data, size);
}
buf = frame->buffer;
rtpmpadepay->size);
if (rtpmpadepay->offset) {
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/* no need to position, simply append */
- g_assert (GST_BUFFER_SIZE (buf) > rtpmpadepay->offset);
- av = MIN (av, GST_BUFFER_SIZE (buf) - rtpmpadepay->offset);
+ g_assert (size > rtpmpadepay->offset);
+ av = MIN (av, size - rtpmpadepay->offset);
GST_LOG_OBJECT (rtpmpadepay,
"appending %d bytes from ADU frame at offset %d", av,
rtpmpadepay->offset);
gst_byte_writer_put_data (rtpmpadepay->mp3_frame,
- GST_BUFFER_DATA (buf) + rtpmpadepay->offset, av);
+ data + rtpmpadepay->offset, av);
rtpmpadepay->offset += av;
+ gst_buffer_unmap (buf, data, size);
} else {
gint pos, tpos;
gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, pos + av);
} else {
/* position and append */
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
GST_LOG_OBJECT (rtpmpadepay, "adding to current MP3 frame");
gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, tpos);
- av = MIN (av, GST_BUFFER_SIZE (buf) - 4 - frame->side_info);
+ av = MIN (av, size - 4 - frame->side_info);
gst_byte_writer_put_data (rtpmpadepay->mp3_frame,
- GST_BUFFER_DATA (buf) + 4 + frame->side_info, av);
+ data + 4 + frame->side_info, av);
rtpmpadepay->offset += av + 4 + frame->side_info;
+ gst_buffer_unmap (buf, data, size);
}
}
gboolean cont, dtype;
guint av, size;
GstClockTime timestamp;
+ GstRTPBuffer rtp = { NULL };
rtpmpadepay = GST_RTP_MPA_ROBUST_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
timestamp = GST_BUFFER_TIMESTAMP (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 1)
goto short_read;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
offset = 0;
GST_LOG_OBJECT (rtpmpadepay, "payload_len: %d", payload_len);
GST_LOG_OBJECT (rtpmpadepay, "offset %d has cont: %d, dtype: %d, size: %d",
offset, cont, dtype, size);
- buf = gst_rtp_buffer_get_payload_subbuffer (buf, offset,
+ buf = gst_rtp_buffer_get_payload_subbuffer (&rtp, offset,
MIN (size, payload_len));
if (cont) {
"discarding continuation fragment without prior fragment");
gst_buffer_unref (buf);
} else {
- av += GST_BUFFER_SIZE (buf);
+ av += gst_buffer_get_size (buf);
gst_adapter_push (rtpmpadepay->adapter, buf);
if (av == size) {
timestamp = gst_adapter_prev_timestamp (rtpmpadepay->adapter, NULL);
/* timestamp applies to first payload, no idea for subsequent ones */
timestamp = GST_CLOCK_TIME_NONE;
}
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
{
GST_ELEMENT_WARNING (rtpmpadepay, STREAM, DECODE,
(NULL), ("Packet contains invalid data"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
"clock-rate = (int) 90000")
);
-GST_BOILERPLATE (GstRtpMPVDepay, gst_rtp_mpv_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+G_DEFINE_TYPE (GstRtpMPVDepay, gst_rtp_mpv_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_mpv_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
GstBuffer * buf);
static void
-gst_rtp_mpv_depay_base_init (gpointer klass)
+gst_rtp_mpv_depay_class_init (GstRtpMPVDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mpv_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mpv_depay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG video depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG video from RTP packets (RFC 2250)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mpv_depay_class_init (GstRtpMPVDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->set_caps = gst_rtp_mpv_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_mpv_depay_process;
}
static void
-gst_rtp_mpv_depay_init (GstRtpMPVDepay * rtpmpvdepay,
- GstRtpMPVDepayClass * klass)
+gst_rtp_mpv_depay_init (GstRtpMPVDepay * rtpmpvdepay)
{
- /* needed because of GST_BOILERPLATE */
}
static gboolean
{
GstRtpMPVDepay *rtpmpvdepay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
rtpmpvdepay = GST_RTP_MPV_DEPAY (depayload);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
{
gint payload_len, payload_header;
guint8 *payload;
guint8 T;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload_header = 0;
if (payload_len <= 4)
payload += 4;
}
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, payload_header, -1);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, payload_header, -1);
if (outbuf) {
GST_DEBUG_OBJECT (rtpmpvdepay,
"gst_rtp_mpv_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_buffer_get_size (outbuf));
}
-
return outbuf;
}
GstCaps * caps);
static GstFlowReturn gst_rtp_mpv_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-static gboolean gst_rtp_mpv_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_mpv_pay_handle_event (GstBaseRTPPayload * payload,
+ GstEvent * event);
-GST_BOILERPLATE (GstRTPMPVPay, gst_rtp_mpv_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_mpv_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpv_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpv_pay_src_template));
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG2 ES video payloader", "Codec/Payloader/Network/RTP",
- "Payload-encodes MPEG2 ES into RTP packets (RFC 2250)",
- "Thijs Vermeir <thijsvermeir@gmail.com>");
-}
+#define gst_rtp_mpv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPMPVPay, gst_rtp_mpv_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_mpv_pay_class_init (GstRTPMPVPayClass * klass)
gstelement_class->change_state = gst_rtp_mpv_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpv_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpv_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG2 ES video payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes MPEG2 ES into RTP packets (RFC 2250)",
+ "Thijs Vermeir <thijsvermeir@gmail.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_mpv_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_mpv_pay_handle_buffer;
gstbasertppayload_class->handle_event = gst_rtp_mpv_pay_handle_event;
}
static void
-gst_rtp_mpv_pay_init (GstRTPMPVPay * rtpmpvpay, GstRTPMPVPayClass * klass)
+gst_rtp_mpv_pay_init (GstRTPMPVPay * rtpmpvpay)
{
GST_BASE_RTP_PAYLOAD (rtpmpvpay)->clock_rate = 90000;
GST_BASE_RTP_PAYLOAD_PT (rtpmpvpay) = GST_RTP_PAYLOAD_MPV;
}
static gboolean
-gst_rtp_mpv_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_mpv_pay_handle_event (GstBaseRTPPayload * payload, GstEvent * event)
{
+ gboolean ret;
GstRTPMPVPay *rtpmpvpay;
- rtpmpvpay = GST_RTP_MPV_PAY (gst_pad_get_parent (pad));
+ rtpmpvpay = GST_RTP_MPV_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
break;
}
- gst_object_unref (rtpmpvpay);
+ ret =
+ GST_BASE_RTP_PAYLOAD_CLASS (parent_class)->handle_event (payload, event);
- /* FALSE to let the parent handle the event as well */
- return FALSE;
+ return ret;
}
static GstFlowReturn
guint towrite;
guint packet_len;
guint payload_len;
+ GstRTPBuffer rtp = { NULL };
packet_len = gst_rtp_buffer_calc_packet_len (avail, 4, 0);
outbuf = gst_rtp_buffer_new_allocate (payload_len, 4, 0);
- payload = gst_rtp_buffer_get_payload (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* enable MPEG Video-specific header
*
* 0 1 2 3
avail -= payload_len;
- gst_rtp_buffer_set_marker (outbuf, avail == 0);
+ gst_rtp_buffer_set_marker (&rtp, avail == 0);
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmpvpay->first_ts;
static gboolean gst_rtp_pcma_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmaDepay, gst_rtp_pcma_depay, GstBaseRTPDepayload,
+#define gst_rtp_pcma_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmaDepay, gst_rtp_pcma_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_pcma_depay_base_init (gpointer klass)
+gst_rtp_pcma_depay_class_init (GstRtpPcmaDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcma_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcma_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP PCMA depayloader",
- "Codec/Depayloader/Network/RTP",
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP PCMA depayloader", "Codec/Depayloader/Network/RTP",
"Extracts PCMA audio from RTP packets",
"Edgard Lima <edgard.lima@indt.org.br>, Zeeshan Ali <zeenix@gmail.com>");
-}
-
-static void
-gst_rtp_pcma_depay_class_init (GstRtpPcmaDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_pcma_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_pcma_depay_setcaps;
}
static void
-gst_rtp_pcma_depay_init (GstRtpPcmaDepay * rtppcmadepay,
- GstRtpPcmaDepayClass * klass)
+gst_rtp_pcma_depay_init (GstRtpPcmaDepay * rtppcmadepay)
{
GstBaseRTPDepayload *depayload;
GstBuffer *outbuf = NULL;
gboolean marker;
guint len;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+      (gint) gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- len = gst_rtp_buffer_get_payload_len (buf);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf) {
GST_BUFFER_DURATION (outbuf) =
}
}
+
return outbuf;
}
static gboolean gst_rtp_pcma_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmaPay, gst_rtp_pcma_pay, GstBaseRTPAudioPayload,
+#define gst_rtp_pcma_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmaPay, gst_rtp_pcma_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_pcma_pay_base_init (gpointer klass)
+gst_rtp_pcma_pay_class_init (GstRtpPcmaPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcma_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcma_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP PCMA payloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP PCMA payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes PCMA audio into a RTP packet",
"Edgard Lima <edgard.lima@indt.org.br>");
-}
-
-static void
-gst_rtp_pcma_pay_class_init (GstRtpPcmaPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_pcma_pay_setcaps;
}
static void
-gst_rtp_pcma_pay_init (GstRtpPcmaPay * rtppcmapay, GstRtpPcmaPayClass * klass)
+gst_rtp_pcma_pay_init (GstRtpPcmaPay * rtppcmapay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
static gboolean gst_rtp_pcmu_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmuDepay, gst_rtp_pcmu_depay, GstBaseRTPDepayload,
+#define gst_rtp_pcmu_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmuDepay, gst_rtp_pcmu_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_pcmu_depay_base_init (gpointer klass)
+gst_rtp_pcmu_depay_class_init (GstRtpPcmuDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcmu_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcmu_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP PCMU depayloader",
- "Codec/Depayloader/Network/RTP",
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP PCMU depayloader", "Codec/Depayloader/Network/RTP",
"Extracts PCMU audio from RTP packets",
"Edgard Lima <edgard.lima@indt.org.br>, Zeeshan Ali <zeenix@gmail.com>");
-}
-
-static void
-gst_rtp_pcmu_depay_class_init (GstRtpPcmuDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_pcmu_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_pcmu_depay_setcaps;
}
static void
-gst_rtp_pcmu_depay_init (GstRtpPcmuDepay * rtppcmudepay,
- GstRtpPcmuDepayClass * klass)
+gst_rtp_pcmu_depay_init (GstRtpPcmuDepay * rtppcmudepay)
{
GstBaseRTPDepayload *depayload;
GstBuffer *outbuf = NULL;
guint len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+      (gint) gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- len = gst_rtp_buffer_get_payload_len (buf);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf) {
GST_BUFFER_DURATION (outbuf) =
static gboolean gst_rtp_pcmu_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmuPay, gst_rtp_pcmu_pay, GstBaseRTPAudioPayload,
+#define gst_rtp_pcmu_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmuPay, gst_rtp_pcmu_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_pcmu_pay_base_init (gpointer klass)
+gst_rtp_pcmu_pay_class_init (GstRtpPcmuPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcmu_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcmu_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP PCMU payloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP PCMU payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes PCMU audio into a RTP packet",
"Edgard Lima <edgard.lima@indt.org.br>");
-}
-
-static void
-gst_rtp_pcmu_pay_class_init (GstRtpPcmuPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_pcmu_pay_setcaps;
}
static void
-gst_rtp_pcmu_pay_init (GstRtpPcmuPay * rtppcmupay, GstRtpPcmuPayClass * klass)
+gst_rtp_pcmu_pay_init (GstRtpPcmuPay * rtppcmupay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
guint clock_rate;
guint64 extrtptime;
GstBuffer *buffer;
+ GstRTCPBuffer rtcp = { NULL };
bin = stream->bin;
have_sr = FALSE;
have_sdes = FALSE;
- GST_RTCP_BUFFER_FOR_PACKETS (more, buffer, &packet) {
+
+ gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp);
+
+ GST_RTCP_BUFFER_FOR_PACKETS (more, &rtcp, &packet) {
/* first packet must be SR or RR or else the validate would have failed */
switch (gst_rtcp_packet_get_type (&packet)) {
case GST_RTCP_TYPE_SR:
break;
}
}
+ gst_rtcp_buffer_unmap (&rtcp);
}
/* create a new stream with @ssrc in @session. Must be called with
static GstStateChangeReturn gst_rtp_bin_change_state (GstElement * element,
GstStateChange transition);
static GstPad *gst_rtp_bin_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_rtp_bin_release_pad (GstElement * element, GstPad * pad);
static void gst_rtp_bin_handle_message (GstBin * bin, GstMessage * message);
-GST_BOILERPLATE (GstRtpBin, gst_rtp_bin, GstBin, GST_TYPE_BIN);
-
-static void
-gst_rtp_bin_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- /* sink pads */
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpbin_recv_rtp_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpbin_recv_rtcp_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpbin_send_rtp_sink_template));
-
- /* src pads */
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpbin_recv_rtp_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpbin_send_rtcp_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpbin_send_rtp_src_template));
-
- gst_element_class_set_details_simple (element_class, "RTP Bin",
- "Filter/Network/RTP",
- "Real-Time Transport Protocol bin",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_bin_parent_class parent_class
+G_DEFINE_TYPE (GstRtpBin, gst_rtp_bin, GST_TYPE_BIN);
static void
gst_rtp_bin_class_init (GstRtpBinClass * klass)
GST_DEBUG_FUNCPTR (gst_rtp_bin_request_new_pad);
gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_rtp_bin_release_pad);
+ /* sink pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_recv_rtp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_recv_rtcp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_send_rtp_sink_template));
+
+ /* src pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_recv_rtp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_send_rtcp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpbin_send_rtp_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP Bin",
+ "Filter/Network/RTP",
+ "Real-Time Transport Protocol bin",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbin_class->handle_message = GST_DEBUG_FUNCPTR (gst_rtp_bin_handle_message);
klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_bin_clear_pt_map);
}
static void
-gst_rtp_bin_init (GstRtpBin * rtpbin, GstRtpBinClass * klass)
+gst_rtp_bin_init (GstRtpBin * rtpbin)
{
gchar *str;
g_free (padname);
g_object_set_data (G_OBJECT (pad), "GstRTPBin.ghostpad", gpad);
- gst_pad_set_caps (gpad, GST_PAD_CAPS (pad));
gst_pad_set_active (gpad, TRUE);
GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
gpad = gst_ghost_pad_new_from_template (padname, pad, templ);
g_free (padname);
- gst_pad_set_caps (gpad, GST_PAD_CAPS (pad));
gst_pad_set_active (gpad, TRUE);
gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), gpad);
gint session = 0;
GstIterator *pad_it = NULL;
gchar *pad_name = NULL;
+ GValue data = { 0, };
GST_DEBUG_OBJECT (element, "find a free pad name for template");
while (!name_found) {
gboolean done = FALSE;
+
g_free (pad_name);
pad_name = g_strdup_printf (templ->name_template, session++);
pad_it = gst_element_iterate_pads (GST_ELEMENT (element));
name_found = TRUE;
while (!done) {
- gpointer data;
-
switch (gst_iterator_next (pad_it, &data)) {
case GST_ITERATOR_OK:
{
GstPad *pad;
gchar *name;
- pad = GST_PAD_CAST (data);
+ pad = g_value_get_object (&data);
name = gst_pad_get_name (pad);
if (strcmp (name, pad_name) == 0) {
name_found = FALSE;
}
g_free (name);
- gst_object_unref (pad);
+ g_value_reset (&data);
break;
}
case GST_ITERATOR_ERROR:
break;
}
}
+ g_value_unset (&data);
gst_iterator_free (pad_it);
}
*/
static GstPad *
gst_rtp_bin_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name)
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
{
GstRtpBin *rtpbin;
GstElementClass *klass;
static guint gst_rtp_jitter_buffer_signals[LAST_SIGNAL] = { 0 };
-GST_BOILERPLATE (GstRtpJitterBuffer, gst_rtp_jitter_buffer, GstElement,
- GST_TYPE_ELEMENT);
+#define gst_rtp_jitter_buffer_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJitterBuffer, gst_rtp_jitter_buffer, GST_TYPE_ELEMENT);
/* object overrides */
static void gst_rtp_jitter_buffer_set_property (GObject * object,
static GstStateChangeReturn gst_rtp_jitter_buffer_change_state (GstElement
* element, GstStateChange transition);
static GstPad *gst_rtp_jitter_buffer_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * filter);
static void gst_rtp_jitter_buffer_release_pad (GstElement * element,
GstPad * pad);
static GstClock *gst_rtp_jitter_buffer_provide_clock (GstElement * element);
/* pad overrides */
-static GstCaps *gst_rtp_jitter_buffer_getcaps (GstPad * pad);
+static GstCaps *gst_rtp_jitter_buffer_getcaps (GstPad * pad, GstCaps * filter);
static GstIterator *gst_rtp_jitter_buffer_iterate_internal_links (GstPad * pad);
/* sinkpad overrides */
-static gboolean gst_jitter_buffer_sink_setcaps (GstPad * pad, GstCaps * caps);
static gboolean gst_rtp_jitter_buffer_sink_event (GstPad * pad,
GstEvent * event);
static GstFlowReturn gst_rtp_jitter_buffer_chain (GstPad * pad,
gboolean active, guint64 base_time);
static void
-gst_rtp_jitter_buffer_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_jitter_buffer_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_jitter_buffer_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_jitter_buffer_sink_rtcp_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP packet jitter-buffer", "Filter/Network/RTP",
- "A buffer that deals with network jitter and other transmission faults",
- "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_jitter_buffer_class_init (GstRtpJitterBufferClass * klass)
{
GObjectClass *gobject_class;
gstelement_class->provide_clock =
GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_provide_clock);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jitter_buffer_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jitter_buffer_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jitter_buffer_sink_rtcp_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP packet jitter-buffer", "Filter/Network/RTP",
+ "A buffer that deals with network jitter and other transmission faults",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
+ "Wim Taymans <wim.taymans@gmail.com>");
+
klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_clear_pt_map);
klass->set_active = GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_set_active);
}
static void
-gst_rtp_jitter_buffer_init (GstRtpJitterBuffer * jitterbuffer,
- GstRtpJitterBufferClass * klass)
+gst_rtp_jitter_buffer_init (GstRtpJitterBuffer * jitterbuffer)
{
GstRtpJitterBufferPrivate *priv;
GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_chain));
gst_pad_set_event_function (priv->sinkpad,
GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_sink_event));
- gst_pad_set_setcaps_function (priv->sinkpad,
- GST_DEBUG_FUNCPTR (gst_jitter_buffer_sink_setcaps));
gst_pad_set_getcaps_function (priv->sinkpad,
GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_getcaps));
GstRtpJitterBuffer *jitterbuffer;
GstPad *otherpad = NULL;
GstIterator *it;
+ GValue val = { 0, };
jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
otherpad = NULL;
}
- it = gst_iterator_new_single (GST_TYPE_PAD, otherpad,
- (GstCopyFunction) gst_object_ref, (GFreeFunc) gst_object_unref);
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_set_object (&val, otherpad);
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
gst_object_unref (jitterbuffer);
static GstPad *
gst_rtp_jitter_buffer_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name)
+ GstPadTemplate * templ, const gchar * name, const GstCaps * filter)
{
GstRtpJitterBuffer *jitterbuffer;
GstElementClass *klass;
}
static GstCaps *
-gst_rtp_jitter_buffer_getcaps (GstPad * pad)
+gst_rtp_jitter_buffer_getcaps (GstPad * pad, GstCaps * filter)
{
GstRtpJitterBuffer *jitterbuffer;
GstRtpJitterBufferPrivate *priv;
other = (pad == priv->srcpad ? priv->sinkpad : priv->srcpad);
- caps = gst_pad_peer_get_caps (other);
+ caps = gst_pad_peer_get_caps (other, filter);
templ = gst_pad_get_pad_template_caps (pad);
if (caps == NULL) {
}
}
-static gboolean
-gst_jitter_buffer_sink_setcaps (GstPad * pad, GstCaps * caps)
-{
- GstRtpJitterBuffer *jitterbuffer;
- GstRtpJitterBufferPrivate *priv;
- gboolean res;
-
- jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
- priv = jitterbuffer->priv;
-
- JBUF_LOCK (priv);
- res = gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps);
- JBUF_UNLOCK (priv);
-
- /* set same caps on srcpad on success */
- if (res)
- gst_pad_set_caps (priv->srcpad, caps);
-
- gst_object_unref (jitterbuffer);
-
- return res;
-}
-
static void
gst_rtp_jitter_buffer_flush_start (GstRtpJitterBuffer * jitterbuffer)
{
GST_DEBUG_OBJECT (jitterbuffer, "received %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_CAPS:
{
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time;
- gboolean update;
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+
+ JBUF_LOCK (priv);
+ ret = gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps);
+ JBUF_UNLOCK (priv);
+
+ /* set same caps on srcpad on success */
+ if (ret)
+ gst_pad_set_caps (priv->srcpad, caps);
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ gst_event_copy_segment (event, &priv->segment);
/* we need time for now */
- if (format != GST_FORMAT_TIME)
+ if (priv->segment.format != GST_FORMAT_TIME)
goto newseg_wrong_format;
GST_DEBUG_OBJECT (jitterbuffer,
- "newsegment: update %d, rate %g, arate %g, start %" GST_TIME_FORMAT
- ", stop %" GST_TIME_FORMAT ", time %" GST_TIME_FORMAT,
- update, rate, arate, GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
- GST_TIME_ARGS (time));
-
- /* now configure the values, we need these to time the release of the
- * buffers on the srcpad. */
- gst_segment_set_newsegment_full (&priv->segment, update,
- rate, arate, format, start, stop, time);
+ "newsegment: %" GST_SEGMENT_FORMAT, &priv->segment);
/* FIXME, push SEGMENT in the queue. Sorting order might be difficult. */
ret = gst_pad_push_event (priv->srcpad, event);
gboolean tail;
gint percent = -1;
guint8 pt;
+ GstRTPBuffer rtp = { NULL };
jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
priv = jitterbuffer->priv;
- pt = gst_rtp_buffer_get_payload_type (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
+ seqnum = gst_rtp_buffer_get_seq (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
/* take the timestamp of the buffer. This is the time when the packet was
* received and is used to calculate jitter and clock skew. We will adjust
timestamp = gst_segment_to_running_time (&priv->segment, GST_FORMAT_TIME,
timestamp);
- seqnum = gst_rtp_buffer_get_seq (buffer);
-
GST_DEBUG_OBJECT (jitterbuffer,
"Received packet #%d at time %" GST_TIME_FORMAT, seqnum,
GST_TIME_ARGS (timestamp));
JBUF_LOCK_CHECK (priv, out_flushing);
if (G_UNLIKELY (priv->last_pt != pt)) {
- GstCaps *caps;
-
GST_DEBUG_OBJECT (jitterbuffer, "pt changed from %u to %u", priv->last_pt,
pt);
priv->last_pt = pt;
/* reset clock-rate so that we get a new one */
priv->clock_rate = -1;
+#if 0
+ GstCaps *caps;
/* Try to get the clock-rate from the caps first if we can. If there are no
* caps we must fire the signal to get the clock-rate. */
if ((caps = GST_BUFFER_CAPS (buffer))) {
gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps);
}
+#endif
}
if (G_UNLIKELY (priv->clock_rate == -1)) {
old_buf = rtp_jitter_buffer_pop (priv->jbuf, &percent);
- GST_DEBUG_OBJECT (jitterbuffer, "Queue full, dropping old packet #%d",
- gst_rtp_buffer_get_seq (old_buf));
+ GST_DEBUG_OBJECT (jitterbuffer, "Queue full, dropping old packet %p",
+ old_buf);
gst_buffer_unref (old_buf);
}
/* we need to make the metadata writable before pushing it in the jitterbuffer
* because the jitterbuffer will update the timestamp */
- buffer = gst_buffer_make_metadata_writable (buffer);
+ buffer = gst_buffer_make_writable (buffer);
/* now insert the packet into the queue in sorted order. This function returns
* FALSE if a packet with the same seqnum was already in the queue, meaning we
guint64 ext_time, elapsed;
guint32 rtp_time;
GstRtpJitterBufferPrivate *priv;
+ GstRTPBuffer rtp = { NULL };
priv = jitterbuffer->priv;
- rtp_time = gst_rtp_buffer_get_timestamp (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_READ, &rtp);
+ rtp_time = gst_rtp_buffer_get_timestamp (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
GST_LOG_OBJECT (jitterbuffer, "rtp %" G_GUINT32_FORMAT ", ext %"
G_GUINT64_FORMAT, rtp_time, priv->ext_timestamp);
GstClockID id;
GstClockTime sync_time;
gint percent = -1;
+ GstRTPBuffer rtp = { NULL };
priv = jitterbuffer->priv;
outbuf = rtp_jitter_buffer_peek (priv->jbuf);
/* get the seqnum and the next expected seqnum */
- seqnum = gst_rtp_buffer_get_seq (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_READ, &rtp);
+ seqnum = gst_rtp_buffer_get_seq (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
next_seqnum = priv->next_seqnum;
/* get the timestamp, this is already corrected for clock skew by the
guint64 ext_rtptime, diff;
guint32 rtptime;
gboolean drop = FALSE;
+ GstRTCPBuffer rtcp = { NULL };
jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
priv = jitterbuffer->priv;
- if (!gst_rtcp_buffer_get_first_packet (buffer, &packet))
- goto invalid_buffer;
+ gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp);
+
+ if (!gst_rtcp_buffer_get_first_packet (&rtcp, &packet))
+ goto empty_buffer;
/* first packet must be SR or RR or else the validate would have failed */
switch (gst_rtcp_packet_get_type (&packet)) {
default:
goto ignore_buffer;
}
+ gst_rtcp_buffer_unmap (&rtcp);
GST_DEBUG_OBJECT (jitterbuffer, "received RTCP of SSRC %08x", ssrc);
ret = GST_FLOW_OK;
goto done;
}
+empty_buffer:
+ {
+ /* this is not fatal but should be filtered earlier */
+ GST_ELEMENT_WARNING (jitterbuffer, STREAM, DECODE, (NULL),
+ ("Received empty RTCP payload, dropping"));
+ gst_rtcp_buffer_unmap (&rtcp);
+ ret = GST_FLOW_OK;
+ goto done;
+ }
ignore_buffer:
{
GST_DEBUG_OBJECT (jitterbuffer, "ignoring RTCP packet");
+ gst_rtcp_buffer_unmap (&rtcp);
ret = GST_FLOW_OK;
goto done;
}
LAST_SIGNAL
};
-GST_BOILERPLATE (GstRtpPtDemux, gst_rtp_pt_demux, GstElement, GST_TYPE_ELEMENT);
+#define gst_rtp_pt_demux_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPtDemux, gst_rtp_pt_demux, GST_TYPE_ELEMENT);
static void gst_rtp_pt_demux_finalize (GObject * object);
static guint gst_rtp_pt_demux_signals[LAST_SIGNAL] = { 0 };
static void
-gst_rtp_pt_demux_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_klass = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (gstelement_klass,
- gst_static_pad_template_get (&rtp_pt_demux_sink_template));
- gst_element_class_add_pad_template (gstelement_klass,
- gst_static_pad_template_get (&rtp_pt_demux_src_template));
-
- gst_element_class_set_details_simple (gstelement_klass, "RTP Demux",
- "Demux/Network/RTP",
- "Parses codec streams transmitted in the same RTP session",
- "Kai Vehmanen <kai.vehmanen@nokia.com>");
-}
-
-static void
gst_rtp_pt_demux_class_init (GstRtpPtDemuxClass * klass)
{
GObjectClass *gobject_klass;
klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_pt_demux_clear_pt_map);
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_pt_demux_sink_template));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_pt_demux_src_template));
+
+ gst_element_class_set_details_simple (gstelement_klass, "RTP Demux",
+ "Demux/Network/RTP",
+ "Parses codec streams transmitted in the same RTP session",
+ "Kai Vehmanen <kai.vehmanen@nokia.com>");
+
GST_DEBUG_CATEGORY_INIT (gst_rtp_pt_demux_debug,
"rtpptdemux", 0, "RTP codec demuxer");
}
static void
-gst_rtp_pt_demux_init (GstRtpPtDemux * ptdemux, GstRtpPtDemuxClass * g_class)
+gst_rtp_pt_demux_init (GstRtpPtDemux * ptdemux)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (ptdemux);
caps = g_value_dup_boxed (&ret);
g_value_unset (&ret);
if (caps == NULL) {
- caps = GST_PAD_CAPS (rtpdemux->sink);
- if (caps)
- gst_caps_ref (caps);
+ caps = gst_pad_get_current_caps (rtpdemux->sink);
}
GST_DEBUG ("pt %d, got caps %" GST_PTR_FORMAT, pt, caps);
GstPad *srcpad;
GstRtpPtDemuxPad *rtpdemuxpad;
GstCaps *caps;
+  GstRTPBuffer rtp = { NULL };
rtpdemux = GST_RTP_PT_DEMUX (GST_OBJECT_PARENT (pad));
if (!gst_rtp_buffer_validate (buf))
goto invalid_buffer;
- pt = gst_rtp_buffer_get_payload_type (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
GST_DEBUG_OBJECT (rtpdemux, "received buffer for pt %d", pt);
rtpdemuxpad->newcaps = FALSE;
}
- gst_buffer_set_caps (buf, GST_PAD_CAPS (srcpad));
-
/* push to srcpad */
ret = gst_pad_push (srcpad, buf);
GstRtpPtDemuxPad *dpad = (GstRtpPtDemuxPad *) walk->data;
if (dpad->pad == pad) {
+ GstStructure *ws;
+
event =
GST_EVENT_CAST (gst_mini_object_make_writable
(GST_MINI_OBJECT_CAST (event)));
- gst_structure_set (event->structure,
- "payload", G_TYPE_UINT, dpad->pt, NULL);
+ ws = gst_event_writable_structure (event);
+ gst_structure_set (ws, "payload", G_TYPE_UINT, dpad->pt, NULL);
break;
}
}
static GstStateChangeReturn gst_rtp_session_change_state (GstElement * element,
GstStateChange transition);
static GstPad *gst_rtp_session_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_rtp_session_release_pad (GstElement * element, GstPad * pad);
+static gboolean gst_rtp_session_sink_setcaps (GstPad * pad, GstCaps * caps);
+static gboolean gst_rtp_session_setcaps_send_rtp (GstPad * pad, GstCaps * caps);
+
static void gst_rtp_session_clear_pt_map (GstRtpSession * rtpsession);
static guint gst_rtp_session_signals[LAST_SIGNAL] = { 0 };
src->ssrc);
}
-GST_BOILERPLATE (GstRtpSession, gst_rtp_session, GstElement, GST_TYPE_ELEMENT);
-
-static void
-gst_rtp_session_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- /* sink pads */
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpsession_recv_rtp_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpsession_recv_rtcp_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpsession_send_rtp_sink_template));
-
- /* src pads */
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpsession_recv_rtp_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpsession_sync_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpsession_send_rtp_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpsession_send_rtcp_src_template));
-
- gst_element_class_set_details_simple (element_class, "RTP Session",
- "Filter/Network/RTP",
- "Implement an RTP session", "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_session_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSession, gst_rtp_session, GST_TYPE_ELEMENT);
static void
gst_rtp_session_class_init (GstRtpSessionClass * klass)
klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_session_clear_pt_map);
+ /* sink pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_recv_rtp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_recv_rtcp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_send_rtp_sink_template));
+
+ /* src pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_recv_rtp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_sync_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_send_rtp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtpsession_send_rtcp_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP Session",
+ "Filter/Network/RTP",
+ "Implement an RTP session", "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (gst_rtp_session_debug,
"rtpsession", 0, "RTP Session");
}
static void
-gst_rtp_session_init (GstRtpSession * rtpsession, GstRtpSessionClass * klass)
+gst_rtp_session_init (GstRtpSession * rtpsession)
{
rtpsession->priv = GST_RTP_SESSION_GET_PRIVATE (rtpsession);
rtpsession->priv->lock = g_mutex_new ();
break;
}
- res = parent_class->change_state (element, transition);
+ res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
GstCaps *caps;
/* set rtcp caps on output pad */
- if (!(caps = GST_PAD_CAPS (rtcp_src))) {
+ if (!(caps = gst_pad_get_current_caps (rtcp_src))) {
caps = gst_caps_new_simple ("application/x-rtcp", NULL);
gst_pad_set_caps (rtcp_src, caps);
- } else
- gst_caps_ref (caps);
- gst_buffer_set_caps (buffer, caps);
+ }
gst_caps_unref (caps);
gst_object_ref (rtcp_src);
GstCaps *caps;
/* set rtcp caps on output pad */
- if (!(caps = GST_PAD_CAPS (sync_src))) {
+ if (!(caps = gst_pad_get_current_caps (sync_src))) {
caps = gst_caps_new_simple ("application/x-rtcp", NULL);
gst_pad_set_caps (sync_src, caps);
- } else
- gst_caps_ref (caps);
- gst_buffer_set_caps (buffer, caps);
+ }
gst_caps_unref (caps);
gst_object_ref (sync_src);
GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ /* process */
+ gst_event_parse_caps (event, &caps);
+ gst_rtp_session_sink_setcaps (pad, caps);
+ /* and eat */
+ gst_event_unref (event);
+ break;
+ }
case GST_EVENT_FLUSH_STOP:
gst_segment_init (&rtpsession->recv_rtp_seg, GST_FORMAT_UNDEFINED);
ret = gst_pad_push_event (rtpsession->recv_rtp_src, event);
break;
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
{
- gboolean update;
- gdouble rate, arate;
- GstFormat format;
- gint64 start, stop, time;
- GstSegment *segment;
+ GstSegment *segment, in_segment;
segment = &rtpsession->recv_rtp_seg;
/* the newsegment event is needed to convert the RTP timestamp to
* running_time, which is needed to generate a mapping from RTP to NTP
* timestamps in SR reports */
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
-
- GST_DEBUG_OBJECT (rtpsession,
- "configured NEWSEGMENT update %d, rate %lf, applied rate %lf, "
- "format GST_FORMAT_TIME, "
- "%" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT
- ", time %" GST_TIME_FORMAT ", accum %" GST_TIME_FORMAT,
- update, rate, arate, GST_TIME_ARGS (segment->start),
- GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time),
- GST_TIME_ARGS (segment->accum));
+ gst_event_copy_segment (event, &in_segment);
+ GST_DEBUG_OBJECT (rtpsession, "received segment %" GST_SEGMENT_FORMAT,
+ in_segment);
- gst_segment_set_newsegment_full (segment, update, rate,
- arate, format, start, stop, time);
+ /* accept upstream */
+ gst_segment_copy_into (&in_segment, segment);
/* push event forward */
ret = gst_pad_push_event (rtpsession->recv_rtp_src, event);
GST_RTP_SESSION_UNLOCK (rtpsession);
if (otherpad) {
- it = gst_iterator_new_single (GST_TYPE_PAD, otherpad,
- (GstCopyFunction) gst_object_ref, (GFreeFunc) gst_object_unref);
+ GValue val = { 0, };
+
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_set_object (&val, otherpad);
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
gst_object_unref (otherpad);
}
GST_DEBUG_OBJECT (rtpsession, "received event");
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ /* process */
+ gst_event_parse_caps (event, &caps);
+ gst_rtp_session_setcaps_send_rtp (pad, caps);
+ /* and eat */
+ gst_event_unref (event);
+ break;
+ }
case GST_EVENT_FLUSH_STOP:
gst_segment_init (&rtpsession->send_rtp_seg, GST_FORMAT_UNDEFINED);
ret = gst_pad_push_event (rtpsession->send_rtp_src, event);
break;
- case GST_EVENT_NEWSEGMENT:{
- gboolean update;
- gdouble rate, arate;
- GstFormat format;
- gint64 start, stop, time;
- GstSegment *segment;
+ case GST_EVENT_SEGMENT:{
+ GstSegment *segment, in_segment;
segment = &rtpsession->send_rtp_seg;
/* the newsegment event is needed to convert the RTP timestamp to
* running_time, which is needed to generate a mapping from RTP to NTP
* timestamps in SR reports */
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
-
- GST_DEBUG_OBJECT (rtpsession,
- "configured NEWSEGMENT update %d, rate %lf, applied rate %lf, "
- "format GST_FORMAT_TIME, "
- "%" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT
- ", time %" GST_TIME_FORMAT ", accum %" GST_TIME_FORMAT,
- update, rate, arate, GST_TIME_ARGS (segment->start),
- GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time),
- GST_TIME_ARGS (segment->accum));
+ gst_event_copy_segment (event, &in_segment);
+ GST_DEBUG_OBJECT (rtpsession, "received segment %" GST_SEGMENT_FORMAT,
+ in_segment);
- gst_segment_set_newsegment_full (segment, update, rate,
- arate, format, start, stop, time);
+ /* accept upstream */
+ gst_segment_copy_into (&in_segment, segment);
/* push event forward */
ret = gst_pad_push_event (rtpsession->send_rtp_src, event);
}
static GstCaps *
-gst_rtp_session_getcaps_send_rtp (GstPad * pad)
+gst_rtp_session_getcaps_send_rtp (GstPad * pad, GstCaps * filter)
{
GstRtpSession *rtpsession;
GstRtpSessionPrivate *priv;
result = gst_caps_new_full (s1, s2, NULL);
+ if (filter) {
+ GstCaps *caps = result;
+
+ result = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ }
+
GST_DEBUG_OBJECT (rtpsession, "getting caps %" GST_PTR_FORMAT, result);
gst_object_unref (rtpsession);
/* All groups in an list have the same timestamp.
* So, just take it from the first group. */
- buffer = gst_buffer_list_get (GST_BUFFER_LIST_CAST (data), 0, 0);
+ buffer = gst_buffer_list_get (GST_BUFFER_LIST_CAST (data), 0);
if (buffer)
timestamp = GST_BUFFER_TIMESTAMP (buffer);
else
gst_rtp_session_chain_recv_rtp);
gst_pad_set_event_function (rtpsession->recv_rtp_sink,
(GstPadEventFunction) gst_rtp_session_event_recv_rtp_sink);
- gst_pad_set_setcaps_function (rtpsession->recv_rtp_sink,
- gst_rtp_session_sink_setcaps);
gst_pad_set_iterate_internal_links_function (rtpsession->recv_rtp_sink,
gst_rtp_session_iterate_internal_links);
gst_pad_set_active (rtpsession->recv_rtp_sink, TRUE);
gst_rtp_session_chain_send_rtp_list);
gst_pad_set_getcaps_function (rtpsession->send_rtp_sink,
gst_rtp_session_getcaps_send_rtp);
- gst_pad_set_setcaps_function (rtpsession->send_rtp_sink,
- gst_rtp_session_setcaps_send_rtp);
gst_pad_set_event_function (rtpsession->send_rtp_sink,
(GstPadEventFunction) gst_rtp_session_event_send_rtp_sink);
gst_pad_set_iterate_internal_links_function (rtpsession->send_rtp_sink,
static GstPad *
gst_rtp_session_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name)
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
{
GstRtpSession *rtpsession;
GstElementClass *klass;
LAST_SIGNAL
};
-GST_BOILERPLATE (GstRtpSsrcDemux, gst_rtp_ssrc_demux, GstElement,
- GST_TYPE_ELEMENT);
-
+#define gst_rtp_ssrc_demux_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSsrcDemux, gst_rtp_ssrc_demux, GST_TYPE_ELEMENT);
/* GObject vmethods */
static void gst_rtp_ssrc_demux_dispose (GObject * object);
GstPadTemplate *templ;
gchar *padname;
GstRtpSsrcDemuxPad *demuxpad;
+ GstCaps *caps;
GST_DEBUG_OBJECT (demux, "creating pad for SSRC %08x", ssrc);
demux->srcpads = g_slist_prepend (demux->srcpads, demuxpad);
/* copy caps from input */
- gst_pad_set_caps (rtp_pad, GST_PAD_CAPS (demux->rtp_sink));
+ caps = gst_pad_get_current_caps (demux->rtp_sink);
+ gst_pad_set_caps (rtp_pad, caps);
+ gst_caps_unref (caps);
gst_pad_use_fixed_caps (rtp_pad);
- gst_pad_set_caps (rtcp_pad, GST_PAD_CAPS (demux->rtcp_sink));
+ caps = gst_pad_get_current_caps (demux->rtcp_sink);
+ gst_pad_set_caps (rtcp_pad, caps);
+ gst_caps_unref (caps);
gst_pad_use_fixed_caps (rtcp_pad);
gst_pad_set_event_function (rtp_pad, gst_rtp_ssrc_demux_src_event);
}
static void
-gst_rtp_ssrc_demux_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_klass = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (gstelement_klass,
- gst_static_pad_template_get (&rtp_ssrc_demux_sink_template));
- gst_element_class_add_pad_template (gstelement_klass,
- gst_static_pad_template_get (&rtp_ssrc_demux_rtcp_sink_template));
- gst_element_class_add_pad_template (gstelement_klass,
- gst_static_pad_template_get (&rtp_ssrc_demux_src_template));
- gst_element_class_add_pad_template (gstelement_klass,
- gst_static_pad_template_get (&rtp_ssrc_demux_rtcp_src_template));
-
- gst_element_class_set_details_simple (gstelement_klass, "RTP SSRC Demux",
- "Demux/Network/RTP",
- "Splits RTP streams based on the SSRC",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_ssrc_demux_class_init (GstRtpSsrcDemuxClass * klass)
{
GObjectClass *gobject_klass;
gstrtpssrcdemux_klass->clear_ssrc =
GST_DEBUG_FUNCPTR (gst_rtp_ssrc_demux_clear_ssrc);
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_ssrc_demux_sink_template));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_ssrc_demux_rtcp_sink_template));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_ssrc_demux_src_template));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&rtp_ssrc_demux_rtcp_src_template));
+
+ gst_element_class_set_details_simple (gstelement_klass, "RTP SSRC Demux",
+ "Demux/Network/RTP",
+ "Splits RTP streams based on the SSRC",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (gst_rtp_ssrc_demux_debug,
"rtpssrcdemux", 0, "RTP SSRC demuxer");
}
static void
-gst_rtp_ssrc_demux_init (GstRtpSsrcDemux * demux,
- GstRtpSsrcDemuxClass * g_class)
+gst_rtp_ssrc_demux_init (GstRtpSsrcDemux * demux)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (demux);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
gst_segment_init (&demux->segment, GST_FORMAT_UNDEFINED);
- case GST_EVENT_NEWSEGMENT:
default:
{
GSList *walk;
demux = GST_RTP_SSRC_DEMUX (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
default:
{
GSList *walk;
GstRtpSsrcDemux *demux;
guint32 ssrc;
GstRtpSsrcDemuxPad *dpad;
+ GstRTPBuffer rtp;
demux = GST_RTP_SSRC_DEMUX (GST_OBJECT_PARENT (pad));
if (!gst_rtp_buffer_validate (buf))
goto invalid_payload;
- ssrc = gst_rtp_buffer_get_ssrc (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
GST_DEBUG_OBJECT (demux, "received buffer of SSRC %08x", ssrc);
guint32 ssrc;
GstRtpSsrcDemuxPad *dpad;
GstRTCPPacket packet;
+ GstRTCPBuffer rtcp;
demux = GST_RTP_SSRC_DEMUX (GST_OBJECT_PARENT (pad));
if (!gst_rtcp_buffer_validate (buf))
goto invalid_rtcp;
- if (!gst_rtcp_buffer_get_first_packet (buf, &packet))
+ gst_rtcp_buffer_map (buf, GST_MAP_READ, &rtcp);
+ if (!gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) {
+ gst_rtcp_buffer_unmap (&rtcp);
goto invalid_rtcp;
+ }
/* first packet must be SR or RR or else the validate would have failed */
switch (gst_rtcp_packet_get_type (&packet)) {
default:
goto unexpected_rtcp;
}
+ gst_rtcp_buffer_unmap (&rtcp);
GST_DEBUG_OBJECT (demux, "received RTCP of SSRC %08x", ssrc);
GstRtpSsrcDemuxPad *dpad = (GstRtpSsrcDemuxPad *) walk->data;
if (dpad->rtp_pad == pad || dpad->rtcp_pad == pad) {
+ GstStructure *ws;
+
event =
GST_EVENT_CAST (gst_mini_object_make_writable
(GST_MINI_OBJECT_CAST (event)));
- gst_structure_set (event->structure, "ssrc", G_TYPE_UINT,
- dpad->ssrc, NULL);
+ ws = gst_event_writable_structure (event);
+ gst_structure_set (ws, "ssrc", G_TYPE_UINT, dpad->ssrc, NULL);
break;
}
}
break;
}
}
- it = gst_iterator_new_single (GST_TYPE_PAD, otherpad,
- (GstCopyFunction) gst_object_ref, (GFreeFunc) gst_object_unref);
+ if (otherpad) {
+ GValue val = { 0, };
+
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_set_object (&val, otherpad);
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
+ gst_object_unref (otherpad);
+ }
GST_PAD_UNLOCK (demux);
gst_object_unref (demux);
GList *list;
guint32 rtptime;
guint16 seqnum;
+ GstRTPBuffer rtp;
g_return_val_if_fail (jbuf != NULL, FALSE);
g_return_val_if_fail (buf != NULL, FALSE);
- seqnum = gst_rtp_buffer_get_seq (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ seqnum = gst_rtp_buffer_get_seq (&rtp);
/* loop the list to skip strictly smaller seqnum buffers */
for (list = jbuf->packets->head; list; list = g_list_next (list)) {
guint16 qseq;
gint gap;
+ GstRTPBuffer rtpb;
- qseq = gst_rtp_buffer_get_seq (GST_BUFFER_CAST (list->data));
+ gst_rtp_buffer_map (GST_BUFFER_CAST (list->data), GST_MAP_READ, &rtpb);
+ qseq = gst_rtp_buffer_get_seq (&rtpb);
+ gst_rtp_buffer_unmap (&rtpb);
/* compare the new seqnum to the one in the buffer */
gap = gst_rtp_buffer_compare_seqnum (seqnum, qseq);
break;
}
- rtptime = gst_rtp_buffer_get_timestamp (buf);
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
switch (jbuf->mode) {
case RTP_JITTER_BUFFER_MODE_NONE:
case RTP_JITTER_BUFFER_MODE_BUFFER:
if (G_LIKELY (tail))
*tail = (list == NULL);
+ gst_rtp_buffer_unmap (&rtp);
+
return TRUE;
/* ERRORS */
duplicate:
{
+ gst_rtp_buffer_unmap (&rtp);
GST_WARNING ("duplicate packet %d found", (gint) seqnum);
return FALSE;
}
guint64 high_ts, low_ts;
GstBuffer *high_buf, *low_buf;
guint32 result;
+ GstRTPBuffer rtp;
g_return_val_if_fail (jbuf != NULL, 0);
if (!high_buf || !low_buf || high_buf == low_buf)
return 0;
- high_ts = gst_rtp_buffer_get_timestamp (high_buf);
- low_ts = gst_rtp_buffer_get_timestamp (low_buf);
+ gst_rtp_buffer_map (high_buf, GST_MAP_READ, &rtp);
+ high_ts = gst_rtp_buffer_get_timestamp (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+ gst_rtp_buffer_map (low_buf, GST_MAP_READ, &rtp);
+ low_ts = gst_rtp_buffer_get_timestamp (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
/* it needs to work if ts wraps */
if (high_ts >= low_ts) {
cc->callback);
v_return = callback (data1,
- gst_value_get_mini_object (param_values + 1),
+ g_value_get_boxed (param_values + 1),
g_value_get_boolean (param_values + 2), data2);
g_value_set_boolean (return_value, v_return);
g_value_get_uint (param_values + 2),
g_value_get_uint (param_values + 3),
g_value_get_uint (param_values + 4),
- gst_value_get_mini_object (param_values + 5), data2);
+ g_value_get_boxed (param_values + 5), data2);
}
gboolean rtp, GstBuffer * buffer, GstClockTime current_time,
GstClockTime running_time, guint64 ntpnstime)
{
+ GstMetaNetAddress *meta;
+ GstRTPBuffer rtpb;
+
/* get time of arrival */
arrival->current_time = current_time;
arrival->running_time = running_time;
arrival->ntpnstime = ntpnstime;
/* get packet size including header overhead */
- arrival->bytes = GST_BUFFER_SIZE (buffer) + sess->header_len;
+ arrival->bytes = gst_buffer_get_size (buffer) + sess->header_len;
if (rtp) {
- arrival->payload_len = gst_rtp_buffer_get_payload_len (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtpb);
+ arrival->payload_len = gst_rtp_buffer_get_payload_len (&rtpb);
+ gst_rtp_buffer_unmap (&rtpb);
} else {
arrival->payload_len = 0;
}
/* for netbuffer we can store the IP address to check for collisions */
- arrival->have_address = GST_IS_NETBUFFER (buffer);
- if (arrival->have_address) {
- GstNetBuffer *netbuf = (GstNetBuffer *) buffer;
-
- memcpy (&arrival->address, &netbuf->from, sizeof (GstNetAddress));
+ meta = gst_buffer_get_meta_net_address (buffer);
+ if (meta) {
+ arrival->have_address = TRUE;
+ memcpy (&arrival->address, &meta->naddr, sizeof (GstNetAddress));
+ } else {
+ arrival->have_address = FALSE;
}
}
guint32 csrcs[16];
guint8 i, count;
guint64 oldrate;
+ GstRTPBuffer rtp;
g_return_val_if_fail (RTP_IS_SESSION (sess), GST_FLOW_ERROR);
g_return_val_if_fail (GST_IS_BUFFER (buffer), GST_FLOW_ERROR);
goto ignore;
/* get SSRC and look up in session database */
- ssrc = gst_rtp_buffer_get_ssrc (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ ssrc = gst_rtp_buffer_get_ssrc (&rtp);
source = obtain_source (sess, ssrc, &created, &arrival, TRUE);
- if (!source)
+ if (!source) {
+ gst_rtp_buffer_unmap (&rtp);
goto collision;
-
- prevsender = RTP_SOURCE_IS_SENDER (source);
- prevactive = RTP_SOURCE_IS_ACTIVE (source);
- oldrate = source->bitrate;
+ }
/* copy available csrc for later */
- count = gst_rtp_buffer_get_csrc_count (buffer);
+ count = gst_rtp_buffer_get_csrc_count (&rtp);
/* make sure to not overflow our array. An RTP buffer can maximally contain
* 16 CSRCs */
count = MIN (count, 16);
for (i = 0; i < count; i++)
- csrcs[i] = gst_rtp_buffer_get_csrc (buffer, i);
+ csrcs[i] = gst_rtp_buffer_get_csrc (&rtp, i);
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ prevsender = RTP_SOURCE_IS_SENDER (source);
+ prevactive = RTP_SOURCE_IS_ACTIVE (source);
+ oldrate = source->bitrate;
/* let source process the packet */
result = rtp_source_process_rtp (source, buffer, &arrival);
GstBuffer *fci_buffer = NULL;
if (fci_length > 0) {
- fci_buffer = gst_buffer_create_sub (packet->buffer,
- fci_data - GST_BUFFER_DATA (packet->buffer), fci_length);
+ fci_buffer = gst_buffer_copy_region (packet->rtcp->buffer,
+ GST_BUFFER_COPY_MEMORY, fci_data - packet->rtcp->data, fci_length);
GST_BUFFER_TIMESTAMP (fci_buffer) = arrival->running_time;
}
gboolean more, is_bye = FALSE, do_sync = FALSE;
RTPArrivalStats arrival;
GstFlowReturn result = GST_FLOW_OK;
+ GstRTCPBuffer rtcp;
g_return_val_if_fail (RTP_IS_SESSION (sess), GST_FLOW_ERROR);
g_return_val_if_fail (GST_IS_BUFFER (buffer), GST_FLOW_ERROR);
goto ignore;
/* start processing the compound packet */
- more = gst_rtcp_buffer_get_first_packet (buffer, &packet);
+ gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp);
+ more = gst_rtcp_buffer_get_first_packet (&rtcp, &packet);
while (more) {
GstRTCPType type;
more = gst_rtcp_packet_move_to_next (&packet);
}
+ gst_rtcp_buffer_unmap (&rtcp);
+
/* if we are scheduling a BYE, we only want to count bye packets, else we
* count everything */
if (sess->source->received_bye) {
/* notify caller of sr packets in the callback */
if (do_sync && sess->callbacks.sync_rtcp) {
/* make writable, we might want to change the buffer */
- buffer = gst_buffer_make_metadata_writable (buffer);
+ buffer = gst_buffer_make_writable (buffer);
result = sess->callbacks.sync_rtcp (sess, sess->source, buffer,
sess->sync_rtcp_user_data);
g_return_val_if_fail (is_list || GST_IS_BUFFER (data), GST_FLOW_ERROR);
if (is_list) {
- valid_packet = gst_rtp_buffer_list_validate (GST_BUFFER_LIST_CAST (data));
+ GstBufferList *blist = GST_BUFFER_LIST_CAST (data);
+ gint i, len = gst_buffer_list_len (blist);
+
+ valid_packet = TRUE;
+ for (i = 0; i < len; i++)
+ valid_packet &= gst_rtp_buffer_validate (gst_buffer_list_get (blist, i));
} else {
valid_packet = gst_rtp_buffer_validate (GST_BUFFER_CAST (data));
}
{
GstRTCPPacket *packet = &data->packet;
RTPSource *own = sess->source;
+ GstRTCPBuffer rtcp;
data->rtcp = gst_rtcp_buffer_new (sess->mtu);
+ gst_rtcp_buffer_map (data->rtcp, GST_MAP_WRITE, &rtcp);
+
if (RTP_SOURCE_IS_SENDER (own)) {
guint64 ntptime;
guint32 rtptime;
/* we are a sender, create SR */
GST_DEBUG ("create SR for SSRC %08x", own->ssrc);
- gst_rtcp_buffer_add_packet (data->rtcp, GST_RTCP_TYPE_SR, packet);
+ gst_rtcp_buffer_add_packet (&rtcp, GST_RTCP_TYPE_SR, packet);
/* get latest stats */
rtp_source_get_new_sr (own, data->ntpnstime, data->running_time,
} else {
/* we are only receiver, create RR */
GST_DEBUG ("create RR for SSRC %08x", own->ssrc);
- gst_rtcp_buffer_add_packet (data->rtcp, GST_RTCP_TYPE_RR, packet);
+ gst_rtcp_buffer_add_packet (&rtcp, GST_RTCP_TYPE_RR, packet);
gst_rtcp_packet_rr_set_ssrc (packet, own->ssrc);
}
+
+ gst_rtcp_buffer_unmap (&rtcp);
}
/* construct a Sender or Receiver Report */
GstRTCPPacket *packet = &data->packet;
const GstStructure *sdes;
gint i, n_fields;
+ GstRTCPBuffer rtcp;
+
+ gst_rtcp_buffer_map (data->rtcp, GST_MAP_WRITE, &rtcp);
/* add SDES packet */
- gst_rtcp_buffer_add_packet (data->rtcp, GST_RTCP_TYPE_SDES, packet);
+ gst_rtcp_buffer_add_packet (&rtcp, GST_RTCP_TYPE_SDES, packet);
gst_rtcp_packet_sdes_add_item (packet, sess->source->ssrc);
}
data->has_sdes = TRUE;
+
+ gst_rtcp_buffer_unmap (&rtcp);
}
/* schedule a BYE packet */
session_bye (RTPSession * sess, ReportData * data)
{
GstRTCPPacket *packet = &data->packet;
+ GstRTCPBuffer rtcp;
/* open packet */
session_start_rtcp (sess, data);
/* add SDES */
session_sdes (sess, data);
+ gst_rtcp_buffer_map (data->rtcp, GST_MAP_WRITE, &rtcp);
+
/* add a BYE packet */
- gst_rtcp_buffer_add_packet (data->rtcp, GST_RTCP_TYPE_BYE, packet);
+ gst_rtcp_buffer_add_packet (&rtcp, GST_RTCP_TYPE_BYE, packet);
gst_rtcp_packet_bye_add_ssrc (packet, sess->source->ssrc);
if (sess->bye_reason)
gst_rtcp_packet_bye_set_reason (packet, sess->bye_reason);
/* we have a BYE packet now */
data->is_bye = TRUE;
+
+ gst_rtcp_buffer_unmap (&rtcp);
}
static gboolean
if (sess->callbacks.send_rtcp && (do_not_suppress || !data.may_suppress)) {
guint packet_size;
- /* close the RTCP packet */
- gst_rtcp_buffer_end (data.rtcp);
-
- packet_size = GST_BUFFER_SIZE (data.rtcp) + sess->header_len;
+ packet_size = gst_buffer_get_size (data.rtcp) + sess->header_len;
UPDATE_AVG (sess->stats.avg_rtcp_packet_size, packet_size);
GST_DEBUG ("%p, sending RTCP packet, avg size %u, %u", &sess->stats,
has_pli_compare_func (gconstpointer a, gconstpointer ignored)
{
GstRTCPPacket packet;
+ GstRTCPBuffer rtcp;
+ gboolean ret = FALSE;
- packet.buffer = (GstBuffer *) a;
- packet.offset = 0;
+ gst_rtcp_buffer_map ((GstBuffer *) a, GST_MAP_READ, &rtcp);
- if (gst_rtcp_packet_get_type (&packet) == GST_RTCP_TYPE_PSFB &&
- gst_rtcp_packet_fb_get_type (&packet) == GST_RTCP_PSFB_TYPE_PLI)
- return TRUE;
- else
- return FALSE;
+ if (gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) {
+ if (gst_rtcp_packet_get_type (&packet) == GST_RTCP_TYPE_PSFB &&
+ gst_rtcp_packet_fb_get_type (&packet) == GST_RTCP_PSFB_TYPE_PLI)
+ ret = TRUE;
+ }
+
+ gst_rtcp_buffer_unmap (&rtcp);
+
+ return ret;
}
static gboolean
if (media_src && !rtp_source_has_retained (media_src,
has_pli_compare_func, NULL)) {
- if (gst_rtcp_buffer_add_packet (buffer, GST_RTCP_TYPE_PSFB, &rtcppacket)) {
+ GstRTCPBuffer rtcp;
+
+ gst_rtcp_buffer_map (buffer, GST_MAP_WRITE, &rtcp);
+ if (gst_rtcp_buffer_add_packet (&rtcp, GST_RTCP_TYPE_PSFB, &rtcppacket)) {
gst_rtcp_packet_fb_set_type (&rtcppacket, GST_RTCP_PSFB_TYPE_PLI);
gst_rtcp_packet_fb_set_sender_ssrc (&rtcppacket,
rtp_source_get_ssrc (sess->source));
gst_rtcp_packet_fb_set_media_ssrc (&rtcppacket, media_ssrc);
ret = TRUE;
+ gst_rtcp_buffer_unmap (&rtcp);
} else {
/* Break because the packet is full, will put next request in a
* further packet
*/
+ gst_rtcp_buffer_unmap (&rtcp);
break;
}
}
gint32 diff;
gint clock_rate;
guint8 pt;
+ GstRTPBuffer rtp;
/* get arrival time */
if ((running_time = arrival->running_time) == GST_CLOCK_TIME_NONE)
goto no_time;
- pt = gst_rtp_buffer_get_payload_type (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
GST_LOG ("SSRC %08x got payload %d", src->ssrc, pt);
/* get clockrate */
- if ((clock_rate = get_clock_rate (src, pt)) == -1)
+ if ((clock_rate = get_clock_rate (src, pt)) == -1) {
+ gst_rtp_buffer_unmap (&rtp);
goto no_clock_rate;
+ }
- rtptime = gst_rtp_buffer_get_timestamp (buffer);
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
/* convert arrival time to RTP timestamp units, truncate to 32 bits, we don't
* care about the absolute value, just the difference. */
GST_LOG ("rtparrival %u, rtptime %u, clock-rate %d, diff %d, jitter: %f",
rtparrival, rtptime, clock_rate, diff, (src->stats.jitter) / 16.0);
+ gst_rtp_buffer_unmap (&rtp);
return;
/* ERRORS */
guint16 seqnr, udelta;
RTPSourceStats *stats;
guint16 expected;
+ GstRTPBuffer rtp;
g_return_val_if_fail (RTP_IS_SOURCE (src), GST_FLOW_ERROR);
g_return_val_if_fail (GST_IS_BUFFER (buffer), GST_FLOW_ERROR);
stats = &src->stats;
- seqnr = gst_rtp_buffer_get_seq (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ seqnr = gst_rtp_buffer_get_seq (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
- rtp_source_update_caps (src, GST_BUFFER_CAPS (buffer));
+ /* FIXME-0.11
+ * would be nice to be able to pass along with buffer */
+ g_assert_not_reached ();
+ /* rtp_source_update_caps (src, GST_BUFFER_CAPS (buffer)); */
if (stats->cycles == -1) {
GST_DEBUG ("received first buffer");
src->received_bye = TRUE;
}
-static GstBufferListItem
-set_ssrc (GstBuffer ** buffer, guint group, guint idx, RTPSource * src)
+static gboolean
+set_ssrc (GstBuffer ** buffer, guint idx, RTPSource * src)
{
+ GstRTPBuffer rtp;
+
*buffer = gst_buffer_make_writable (*buffer);
- gst_rtp_buffer_set_ssrc (*buffer, src->ssrc);
- return GST_BUFFER_LIST_SKIP_GROUP;
+ gst_rtp_buffer_map (*buffer, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_ssrc (&rtp, src->ssrc);
+ gst_rtp_buffer_unmap (&rtp);
+ return TRUE;
}
/**
GstBuffer *buffer = NULL;
guint packets;
guint32 ssrc;
+ GstRTPBuffer rtp;
g_return_val_if_fail (RTP_IS_SOURCE (src), GST_FLOW_ERROR);
g_return_val_if_fail (is_list || GST_IS_BUFFER (data), GST_FLOW_ERROR);
/* We can grab the caps from the first group, since all
* groups of a buffer list have same caps. */
- buffer = gst_buffer_list_get (list, 0, 0);
+ buffer = gst_buffer_list_get (list, 0);
if (!buffer)
goto no_buffer;
} else {
buffer = GST_BUFFER_CAST (data);
}
- rtp_source_update_caps (src, GST_BUFFER_CAPS (buffer));
+
+ /* FIXME-0.11 */
+ g_assert_not_reached ();
+ /* rtp_source_update_caps (src, GST_BUFFER_CAPS (buffer)); */
/* we are a sender now */
src->is_sender = TRUE;
if (is_list) {
+ gint i;
+
/* Each group makes up a network packet. */
- packets = gst_buffer_list_n_groups (list);
- len = gst_rtp_buffer_list_get_payload_len (list);
+ packets = gst_buffer_list_len (list);
+ for (i = 0, len = 0; i < packets; i++) {
+ gst_rtp_buffer_map (gst_buffer_list_get (list, i), GST_MAP_READ, &rtp);
+ len += gst_rtp_buffer_get_payload_len (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+ }
+ /* subsequent info taken from first list member */
+ gst_rtp_buffer_map (gst_buffer_list_get (list, 0), GST_MAP_READ, &rtp);
} else {
packets = 1;
- len = gst_rtp_buffer_get_payload_len (buffer);
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
}
/* update stats for the SR */
do_bitrate_estimation (src, running_time, &src->bytes_sent);
- if (is_list) {
- rtptime = gst_rtp_buffer_list_get_timestamp (list);
- } else {
- rtptime = gst_rtp_buffer_get_timestamp (buffer);
- }
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
ext_rtptime = src->last_rtptime;
ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime);
src->last_rtptime = ext_rtptime;
/* push packet */
- if (!src->callbacks.push_rtp)
+ if (!src->callbacks.push_rtp) {
+ gst_rtp_buffer_unmap (&rtp);
goto no_callback;
-
- if (is_list) {
- ssrc = gst_rtp_buffer_list_get_ssrc (list);
- } else {
- ssrc = gst_rtp_buffer_get_ssrc (buffer);
}
+ ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+
if (ssrc != src->ssrc) {
/* the SSRC of the packet is not correct, make a writable buffer and
* update the SSRC. This could involve a complete copy of the packet when
list = gst_buffer_list_make_writable (list);
gst_buffer_list_foreach (list, (GstBufferListFunc) set_ssrc, src);
} else {
- set_ssrc (&buffer, 0, 0, src);
+ set_ssrc (&buffer, 0, src);
}
}
GST_LOG ("pushing RTP %s %" G_GUINT64_FORMAT, is_list ? "list" : "packet",
{
GstBuffer *buffer;
- buffer = gst_buffer_create_sub (packet->buffer, packet->offset,
- (gst_rtcp_packet_get_length (packet) + 1) * 4);
+ buffer = gst_buffer_copy_region (packet->rtcp->buffer, GST_BUFFER_COPY_MEMORY,
+ packet->offset, (gst_rtcp_packet_get_length (packet) + 1) * 4);
GST_BUFFER_TIMESTAMP (buffer) = running_time;
/* GStreamer
- * Copyright (C) <2005,2006> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2005,2006> Wim Taymans <wim.taymans@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
static GstStateChangeReturn gst_rtp_dec_change_state (GstElement * element,
GstStateChange transition);
static GstPad *gst_rtp_dec_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_rtp_dec_release_pad (GstElement * element, GstPad * pad);
static GstFlowReturn gst_rtp_dec_chain_rtp (GstPad * pad, GstBuffer * buffer);
static guint gst_rtp_dec_signals[LAST_SIGNAL] = { 0 };
-GST_BOILERPLATE (GstRTPDec, gst_rtp_dec, GstElement, GST_TYPE_ELEMENT);
+#define gst_rtp_dec_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDec, gst_rtp_dec, GST_TYPE_ELEMENT);
-static void
-gst_rtp_dec_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- /* sink pads */
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dec_recv_rtp_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dec_recv_rtcp_sink_template));
- /* src pads */
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dec_recv_rtp_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dec_rtcp_src_template));
-
- gst_element_class_set_details_simple (element_class, "RTP Decoder",
- "Codec/Parser/Network",
- "Accepts raw RTP and RTCP packets and sends them forward",
- "Wim Taymans <wim@fluendo.com>");
-}
/* BOXED:UINT,UINT */
#define g_marshal_value_peek_uint(v) g_value_get_uint (v)
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (rtpdec_debug, "rtpdec", 0, "RTP decoder");
+
gobject_class->finalize = gst_rtp_dec_finalize;
gobject_class->set_property = gst_rtp_dec_set_property;
gobject_class->get_property = gst_rtp_dec_get_property;
GST_DEBUG_FUNCPTR (gst_rtp_dec_request_new_pad);
gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_rtp_dec_release_pad);
- GST_DEBUG_CATEGORY_INIT (rtpdec_debug, "rtpdec", 0, "RTP decoder");
+ /* sink pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_recv_rtp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_recv_rtcp_sink_template));
+ /* src pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_recv_rtp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_rtcp_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP Decoder",
+ "Codec/Parser/Network",
+ "Accepts raw RTP and RTCP packets and sends them forward",
+ "Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_rtp_dec_init (GstRTPDec * rtpdec, GstRTPDecClass * klass)
+gst_rtp_dec_init (GstRTPDec * rtpdec)
{
rtpdec->provided_clock = gst_system_clock_obtain ();
rtpdec->latency = DEFAULT_LATENCY_MS;
GstRTPDecSession *session;
guint32 ssrc;
guint8 pt;
+ GstRTPBuffer rtp = { NULL, };
rtpdec = GST_RTP_DEC (GST_PAD_PARENT (pad));
if (!gst_rtp_buffer_validate (buffer))
goto bad_packet;
- ssrc = gst_rtp_buffer_get_ssrc (buffer);
- pt = gst_rtp_buffer_get_payload_type (buffer);
+
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
GST_DEBUG_OBJECT (rtpdec, "SSRC %08x, PT %d", ssrc, pt);
session->recv_rtp_src = gst_pad_new_from_template (templ, name);
g_free (name);
- gst_pad_set_caps (session->recv_rtp_src, caps);
+ gst_pad_push_event (session->recv_rtp_src, gst_event_new_caps (caps));
gst_pad_set_element_private (session->recv_rtp_src, session);
gst_pad_set_query_function (session->recv_rtp_src, gst_rtp_dec_query_src);
session->active = TRUE;
}
- gst_buffer_set_caps (buffer, GST_PAD_CAPS (session->recv_rtp_src));
-
res = gst_pad_push (session->recv_rtp_src, buffer);
return res;
*/
static GstPad *
gst_rtp_dec_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name)
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
{
GstRTPDec *rtpdec;
GstElementClass *klass;
} G_STMT_END
/*static guint gst_rtspsrc_signals[LAST_SIGNAL] = { 0 }; */
-
-static void
-_do_init (GType rtspsrc_type)
-{
- static const GInterfaceInfo urihandler_info = {
- gst_rtspsrc_uri_handler_init,
- NULL,
- NULL
- };
-
- GST_DEBUG_CATEGORY_INIT (rtspsrc_debug, "rtspsrc", 0, "RTSP src");
-
- g_type_add_interface_static (rtspsrc_type, GST_TYPE_URI_HANDLER,
- &urihandler_info);
-}
-
-GST_BOILERPLATE_FULL (GstRTSPSrc, gst_rtspsrc, GstBin, GST_TYPE_BIN, _do_init);
-
-static void
-gst_rtspsrc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtptemplate));
-
- gst_element_class_set_details_simple (element_class, "RTSP packet receiver",
- "Source/Network",
- "Receive data over the network via RTSP (RFC 2326)",
- "Wim Taymans <wim@fluendo.com>, "
- "Thijs Vermeir <thijs.vermeir@barco.com>, "
- "Lutz Mueller <lutz@topfrose.de>");
-}
+#define gst_rtspsrc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstRTSPSrc, gst_rtspsrc, GST_TYPE_BIN,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_rtspsrc_uri_handler_init));
static void
gst_rtspsrc_class_init (GstRTSPSrcClass * klass)
gstelement_class = (GstElementClass *) klass;
gstbin_class = (GstBinClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (rtspsrc_debug, "rtspsrc", 0, "RTSP src");
+
gobject_class->set_property = gst_rtspsrc_set_property;
gobject_class->get_property = gst_rtspsrc_get_property;
gstelement_class->send_event = gst_rtspsrc_send_event;
gstelement_class->change_state = gst_rtspsrc_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtptemplate));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTSP packet receiver", "Source/Network",
+ "Receive data over the network via RTSP (RFC 2326)",
+ "Wim Taymans <wim@fluendo.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>, "
+ "Lutz Mueller <lutz@topfrose.de>");
+
gstbin_class->handle_message = gst_rtspsrc_handle_message;
gst_rtsp_ext_list_init ();
static void
-gst_rtspsrc_init (GstRTSPSrc * src, GstRTSPSrcClass * g_class)
+gst_rtspsrc_init (GstRTSPSrc * src)
{
#ifdef G_OS_WIN32
WSADATA wsa_data;
/* we keep these elements, we configure all in configure_transport when the
* server told us to really use the UDP ports. */
- stream->udpsrc[0] = gst_object_ref (udpsrc0);
- stream->udpsrc[1] = gst_object_ref (udpsrc1);
+ stream->udpsrc[0] = gst_object_ref_sink (udpsrc0);
+ stream->udpsrc[1] = gst_object_ref_sink (udpsrc1);
/* keep track of next available port number when we have a range
* configured */
if (src->next_port_num != 0)
src->next_port_num = tmp_rtcp + 1;
- /* they are ours now */
- gst_object_sink (udpsrc0);
- gst_object_sink (udpsrc1);
-
return TRUE;
/* ERRORS */
cmd = CMD_WAIT;
state = GST_STATE_PAUSED;
} else {
- event = gst_event_new_flush_stop ();
+ event = gst_event_new_flush_stop (TRUE);
GST_DEBUG_OBJECT (src, "stop flush");
cmd = CMD_LOOP;
state = GST_STATE_PLAYING;
* right values in the segment to perform the seek */
if (event) {
GST_DEBUG_OBJECT (src, "configuring seek");
- gst_segment_set_seek (&seeksegment, rate, format, flags,
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
cur_type, cur, stop_type, stop, &update);
}
/* if we started flush, we stop now */
GST_DEBUG_OBJECT (src, "stopping flush");
gst_rtspsrc_flush (src, FALSE);
- } else if (src->running) {
- /* re-engage loop */
- gst_rtspsrc_loop_send_cmd (src, CMD_LOOP, FALSE);
-
- /* we are running the current segment and doing a non-flushing seek,
- * close the segment first based on the previous last_stop. */
- GST_DEBUG_OBJECT (src, "closing running segment %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, src->segment.accum, src->segment.last_stop);
-
- /* queue the segment for sending in the stream thread */
- if (src->close_segment)
- gst_event_unref (src->close_segment);
- src->close_segment = gst_event_new_new_segment (TRUE,
- src->segment.rate, src->segment.format,
- src->segment.accum, src->segment.last_stop, src->segment.accum);
-
- /* keep track of our last_stop */
- seeksegment.accum = src->segment.last_stop;
}
/* now we did the seek and can activate the new segment values */
if (src->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT_CAST (src),
gst_message_new_segment_start (GST_OBJECT_CAST (src),
- src->segment.format, src->segment.last_stop));
+ src->segment.format, src->segment.position));
}
/* now create the newsegment */
GST_DEBUG_OBJECT (src, "Creating newsegment from %" G_GINT64_FORMAT
- " to %" G_GINT64_FORMAT, src->segment.last_stop, stop);
+ " to %" G_GINT64_FORMAT, src->segment.position, stop);
/* store the newsegment event so it can be sent from the streaming thread. */
if (src->start_segment)
gst_event_unref (src->start_segment);
- src->start_segment =
- gst_event_new_new_segment (FALSE, src->segment.rate,
- src->segment.format, src->segment.last_stop, stop,
- src->segment.last_stop);
+ src->start_segment = gst_event_new_segment (&src->segment);
/* mark discont */
GST_DEBUG_OBJECT (src, "mark DISCONT, we did a seek to another position");
GstFlowReturn res = GST_FLOW_OK;
guint8 *data;
guint size;
+ gsize bsize;
GstRTSPResult ret;
GstRTSPMessage message = { 0 };
GstRTSPConnection *conn;
stream = (GstRTSPStream *) gst_pad_get_element_private (pad);
src = stream->parent;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READ);
+ size = bsize;
gst_rtsp_message_init_data (&message, stream->channel[1]);
gst_rtsp_message_steal_body (&message, &data, &size);
gst_rtsp_message_unset (&message);
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
return res;
}
-static void
-pad_unblocked (GstPad * pad, gboolean blocked, GstRTSPSrc * src)
+static GstProbeReturn
+pad_blocked (GstPad * pad, GstProbeType type, gpointer type_data,
+ gpointer user_data)
{
- GST_DEBUG_OBJECT (src, "pad %s:%s unblocked", GST_DEBUG_PAD_NAME (pad));
-}
+ GstRTSPSrc *src = user_data;
-static void
-pad_blocked (GstPad * pad, gboolean blocked, GstRTSPSrc * src)
-{
GST_DEBUG_OBJECT (src, "pad %s:%s blocked, activating streams",
GST_DEBUG_PAD_NAME (pad));
gst_rtspsrc_activate_streams (src);
- return;
+ return GST_PROBE_OK;
was_ok:
{
GST_OBJECT_UNLOCK (src);
- return;
+ return GST_PROBE_OK;
}
}
goto no_element;
/* take ownership */
- gst_object_ref (stream->udpsrc[0]);
- gst_object_sink (stream->udpsrc[0]);
+ gst_object_ref_sink (stream->udpsrc[0]);
/* change state */
gst_element_set_state (stream->udpsrc[0], GST_STATE_PAUSED);
goto no_element;
/* take ownership */
- gst_object_ref (stream->udpsrc[1]);
- gst_object_sink (stream->udpsrc[1]);
+ gst_object_ref_sink (stream->udpsrc[1]);
gst_element_set_state (stream->udpsrc[1], GST_STATE_PAUSED);
}
/* configure pad block on the pad. As soon as there is dataflow on the
* UDP source, we know that UDP is not blocked by a firewall and we can
* configure all the streams to let the application autoplug decoders. */
- gst_pad_set_blocked_async (stream->blockedpad, TRUE,
- (GstPadBlockCallback) pad_blocked, src);
+ stream->blockid =
+ gst_pad_add_probe (stream->blockedpad, GST_PROBE_TYPE_BLOCK,
+ pad_blocked, src, NULL);
if (stream->channelpad[0]) {
GST_DEBUG_OBJECT (src, "connecting UDP source 0 to manager");
for (walk = src->streams; walk; walk = g_list_next (walk)) {
GstRTSPStream *stream = (GstRTSPStream *) walk->data;
- if (stream->blockedpad) {
+ if (stream->blockid) {
GST_DEBUG_OBJECT (src, "unblocking stream pad %p", stream);
- gst_pad_set_blocked_async (stream->blockedpad, FALSE,
- (GstPadBlockCallback) pad_unblocked, src);
- stream->blockedpad = NULL;
+ gst_pad_remove_probe (stream->blockedpad, stream->blockid);
+ stream->blockid = 0;
}
}
GST_DEBUG_OBJECT (src, "configuring stream caps");
- start = segment->last_stop;
+ start = segment->position;
stop = segment->duration;
play_speed = segment->rate;
play_scale = segment->applied_rate;
size -= 1;
buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = data;
- GST_BUFFER_MALLOCDATA (buf) = data;
- GST_BUFFER_SIZE (buf) = size;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
/* don't need message anymore */
gst_rtsp_message_unset (&message);
src->need_activate = FALSE;
}
- if (!src->manager) {
- /* set stream caps on buffer when we don't have a session manager to do it
- * for us */
- gst_buffer_set_caps (buf, stream->caps);
- }
-
if (src->base_time == -1) {
/* Take current running_time. This timestamp will be put on
* the first buffer of each stream because we are a live source and so we
if (src->segment.flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT_CAST (src),
gst_message_new_segment_done (GST_OBJECT_CAST (src),
- src->segment.format, src->segment.last_stop));
+ src->segment.format, src->segment.position));
} else {
gst_rtspsrc_push_event (src, gst_event_new_eos (), FALSE);
}
/* we need to start playback without clipping from the position reported by
* the server */
segment->start = seconds;
- segment->last_stop = seconds;
+ segment->position = seconds;
if (therange->max.type == GST_RTSP_TIME_NOW)
seconds = -1;
/* don't change duration with unknown value, we might have a valid value
* there that we want to keep. */
if (seconds != -1)
- gst_segment_set_duration (segment, GST_FORMAT_TIME, seconds);
+ segment->duration = seconds;
return TRUE;
}
if (src->range && src->range->min.type == GST_RTSP_TIME_NOW) {
g_strlcpy (val_str, "now", sizeof (val_str));
} else {
- if (segment->last_stop == 0) {
+ if (segment->position == 0) {
g_strlcpy (val_str, "0", sizeof (val_str));
} else {
g_ascii_dtostr (val_str, sizeof (val_str),
- ((gdouble) segment->last_stop) / GST_SECOND);
+ ((gdouble) segment->position) / GST_SECOND);
}
}
return g_strdup_printf ("npt=%s-", val_str);
/* NOTE the above also disables npt based eos detection */
/* and below forces position to 0,
* which is visible feedback we lost the plot */
- segment->start = segment->last_stop = src->last_pos;
+ segment->start = segment->position = src->last_pos;
}
gst_rtsp_message_unset (&request);
/*** GSTURIHANDLER INTERFACE *************************************************/
static GstURIType
-gst_rtspsrc_uri_get_type (void)
+gst_rtspsrc_uri_get_type (GType type)
{
return GST_URI_SRC;
}
static gchar **
-gst_rtspsrc_uri_get_protocols (void)
+gst_rtspsrc_uri_get_protocols (GType type)
{
static const gchar *protocols[] =
{ "rtsp", "rtspu", "rtspt", "rtsph", "rtsp-sdp", NULL };
/* our udp sources */
GstElement *udpsrc[2];
GstPad *blockedpad;
+ gulong blockid;
gboolean is_ipv6;
/* our udp sinks back to the server */
gboolean need_range;
gboolean skip;
gint free_channel;
- GstEvent *close_segment;
GstEvent *start_segment;
GstClockTime base_time;
" channels = (int) [ 1, MAX ]"
/* Spectrum properties */
-#define DEFAULT_MESSAGE TRUE
#define DEFAULT_POST_MESSAGES TRUE
#define DEFAULT_MESSAGE_MAGNITUDE TRUE
#define DEFAULT_MESSAGE_PHASE FALSE
enum
{
PROP_0,
- PROP_MESSAGE,
PROP_POST_MESSAGES,
PROP_MESSAGE_MAGNITUDE,
PROP_MESSAGE_PHASE,
PROP_MULTI_CHANNEL
};
-GST_BOILERPLATE (GstSpectrum, gst_spectrum, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER);
+#define gst_spectrum_parent_class parent_class
+G_DEFINE_TYPE (GstSpectrum, gst_spectrum, GST_TYPE_AUDIO_FILTER);
static void gst_spectrum_finalize (GObject * object);
static void gst_spectrum_set_property (GObject * object, guint prop_id,
GstRingBufferSpec * format);
static void
-gst_spectrum_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Spectrum analyzer",
- "Filter/Analyzer/Audio",
- "Run an FFT on the audio signal, output spectrum data",
- "Erik Walthinsen <omega@cse.ogi.edu>, "
- "Stefan Kost <ensonic@users.sf.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (g_class),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_spectrum_class_init (GstSpectrumClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
GstAudioFilterClass *filter_class = GST_AUDIO_FILTER_CLASS (klass);
+ GstCaps *caps;
gobject_class->set_property = gst_spectrum_set_property;
gobject_class->get_property = gst_spectrum_get_property;
filter_class->setup = GST_DEBUG_FUNCPTR (gst_spectrum_setup);
- /* FIXME 0.11, remove in favour of post-messages */
- g_object_class_install_property (gobject_class, PROP_MESSAGE,
- g_param_spec_boolean ("message", "Message",
- "Whether to post a 'spectrum' element message on the bus for each "
- "passed interval (deprecated, use post-messages)", DEFAULT_MESSAGE,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
* GstSpectrum:post-messages
*
GST_DEBUG_CATEGORY_INIT (gst_spectrum_debug, "spectrum", 0,
"audio spectrum analyser element");
+
+ gst_element_class_set_details_simple (element_class, "Spectrum analyzer",
+ "Filter/Analyzer/Audio",
+ "Run an FFT on the audio signal, output spectrum data",
+ "Erik Walthinsen <omega@cse.ogi.edu>, "
+ "Stefan Kost <ensonic@users.sf.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (filter_class, caps);
+ gst_caps_unref (caps);
}
static void
-gst_spectrum_init (GstSpectrum * spectrum, GstSpectrumClass * g_class)
+gst_spectrum_init (GstSpectrum * spectrum)
{
spectrum->post_messages = DEFAULT_POST_MESSAGES;
spectrum->message_magnitude = DEFAULT_MESSAGE_MAGNITUDE;
GstSpectrum *filter = GST_SPECTRUM (object);
switch (prop_id) {
- case PROP_MESSAGE:
case PROP_POST_MESSAGES:
filter->post_messages = g_value_get_boolean (value);
break;
GstSpectrum *filter = GST_SPECTRUM (object);
switch (prop_id) {
- case PROP_MESSAGE:
case PROP_POST_MESSAGES:
g_value_set_boolean (value, filter->post_messages);
break;
guint nfft = 2 * bands - 2;
guint input_pos;
gfloat *input;
- const guint8 *data = GST_BUFFER_DATA (buffer);
- guint size = GST_BUFFER_SIZE (buffer);
+ const guint8 *data, *mdata;
+ gsize size;
guint frame_size = width * channels;
guint fft_todo, msg_todo, block_size;
gboolean have_full_interval;
GstSpectrumChannel *cd;
GstSpectrumInputData input_data;
- GST_LOG_OBJECT (spectrum, "input size: %d bytes", GST_BUFFER_SIZE (buffer));
+ data = mdata = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
+ GST_LOG_OBJECT (spectrum, "input size: %" G_GSIZE_FORMAT " bytes", size);
if (GST_BUFFER_IS_DISCONT (buffer)) {
GST_DEBUG_OBJECT (spectrum, "Discontinuity detected -- flushing");
spectrum->input_pos = input_pos;
+ gst_buffer_unmap (buffer, (guint8 *) mdata, -1);
+
g_assert (size == 0);
return GST_FLOW_OK;
udpctx->sock = -1; \
} G_STMT_END
-static void gst_dynudpsink_base_init (gpointer g_class);
-static void gst_dynudpsink_class_init (GstDynUDPSink * klass);
-static void gst_dynudpsink_init (GstDynUDPSink * udpsink);
static void gst_dynudpsink_finalize (GObject * object);
static GstFlowReturn gst_dynudpsink_render (GstBaseSink * sink,
static void gst_dynudpsink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstElementClass *parent_class = NULL;
-
static guint gst_dynudpsink_signals[LAST_SIGNAL] = { 0 };
-GType
-gst_dynudpsink_get_type (void)
-{
- static GType dynudpsink_type = 0;
-
- if (!dynudpsink_type) {
- static const GTypeInfo dynudpsink_info = {
- sizeof (GstDynUDPSinkClass),
- gst_dynudpsink_base_init,
- NULL,
- (GClassInitFunc) gst_dynudpsink_class_init,
- NULL,
- NULL,
- sizeof (GstDynUDPSink),
- 0,
- (GInstanceInitFunc) gst_dynudpsink_init,
- NULL
- };
-
- dynudpsink_type =
- g_type_register_static (GST_TYPE_BASE_SINK, "GstDynUDPSink",
- &dynudpsink_info, 0);
- }
- return dynudpsink_type;
-}
+#define gst_dynudpsink_parent_class parent_class
+G_DEFINE_TYPE (GstDynUDPSink, gst_dynudpsink, GST_TYPE_BASE_SINK);
static void
-gst_dynudpsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
-
- gst_element_class_set_details_simple (element_class, "UDP packet sender",
- "Sink/Network",
- "Send data over the network via UDP",
- "Philippe Khalaf <burger@speedy.org>");
-}
-
-static void
-gst_dynudpsink_class_init (GstDynUDPSink * klass)
+gst_dynudpsink_class_init (GstDynUDPSinkClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gstelement_class->change_state = gst_dynudpsink_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP",
+ "Philippe Khalaf <burger@speedy.org>");
+
gstbasesink_class->render = gst_dynudpsink_render;
GST_DEBUG_CATEGORY_INIT (dynudpsink_debug, "dynudpsink", 0, "UDP sink");
gst_dynudpsink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
GstDynUDPSink *sink;
- gint ret, size;
+ gint ret;
+ gsize size;
guint8 *data;
- GstNetBuffer *netbuf;
+ GstMetaNetAddress *meta;
struct sockaddr_in theiraddr;
guint16 destport;
guint32 destaddr;
memset (&theiraddr, 0, sizeof (theiraddr));
- if (GST_IS_NETBUFFER (buffer)) {
- netbuf = GST_NETBUFFER (buffer);
- } else {
+ meta = gst_buffer_get_meta_net_address (buffer);
+
+ if (meta == NULL) {
GST_DEBUG ("Received buffer is not a GstNetBuffer, skipping");
return GST_FLOW_OK;
}
sink = GST_DYNUDPSINK (bsink);
- size = GST_BUFFER_SIZE (netbuf);
- data = GST_BUFFER_DATA (netbuf);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
GST_DEBUG ("about to send %d bytes", size);
- // let's get the address from the netbuffer
- gst_netaddress_get_ip4_address (&netbuf->to, &destaddr, &destport);
+ /* let's get the address from the metadata */
+ gst_netaddress_get_ip4_address (&meta->naddr, &destaddr, &destport);
GST_DEBUG ("sending %d bytes to client %d port %d", size, destaddr, destport);
#endif
(struct sockaddr *) &theiraddr, sizeof (theiraddr));
+ gst_buffer_unmap (buffer, data, size);
+
if (ret < 0) {
if (errno != EINTR && errno != EAGAIN) {
goto send_error;
udpctx->sock = DEFAULT_SOCK; \
} G_STMT_END
-static void gst_multiudpsink_base_init (gpointer g_class);
-static void gst_multiudpsink_class_init (GstMultiUDPSinkClass * klass);
-static void gst_multiudpsink_init (GstMultiUDPSink * udpsink);
static void gst_multiudpsink_finalize (GObject * object);
static GstFlowReturn gst_multiudpsink_render (GstBaseSink * sink,
GstBuffer * buffer);
+#if 0
#ifndef G_OS_WIN32 /* sendmsg() is not available on Windows */
static GstFlowReturn gst_multiudpsink_render_list (GstBaseSink * bsink,
GstBufferList * list);
#endif
+#endif
static GstStateChangeReturn gst_multiudpsink_change_state (GstElement *
element, GstStateChange transition);
static void gst_multiudpsink_clear_internal (GstMultiUDPSink * sink,
gboolean lock);
-static GstElementClass *parent_class = NULL;
-
static guint gst_multiudpsink_signals[LAST_SIGNAL] = { 0 };
-GType
-gst_multiudpsink_get_type (void)
-{
- static GType multiudpsink_type = 0;
-
- if (!multiudpsink_type) {
- static const GTypeInfo multiudpsink_info = {
- sizeof (GstMultiUDPSinkClass),
- gst_multiudpsink_base_init,
- NULL,
- (GClassInitFunc) gst_multiudpsink_class_init,
- NULL,
- NULL,
- sizeof (GstMultiUDPSink),
- 0,
- (GInstanceInitFunc) gst_multiudpsink_init,
- NULL
- };
-
- multiudpsink_type =
- g_type_register_static (GST_TYPE_BASE_SINK, "GstMultiUDPSink",
- &multiudpsink_info, 0);
- }
- return multiudpsink_type;
-}
-
-static void
-gst_multiudpsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
-
- gst_element_class_set_details_simple (element_class, "UDP packet sender",
- "Sink/Network",
- "Send data over the network via UDP",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_multiudpsink_parent_class parent_class
+G_DEFINE_TYPE (GstMultiUDPSink, gst_multiudpsink, GST_TYPE_BASE_SINK);
static void
gst_multiudpsink_class_init (GstMultiUDPSinkClass * klass)
gstelement_class->change_state = gst_multiudpsink_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasesink_class->render = gst_multiudpsink_render;
+#if 0
#ifndef G_OS_WIN32
gstbasesink_class->render_list = gst_multiudpsink_render_list;
#endif
+#endif
klass->add = gst_multiudpsink_add;
klass->remove = gst_multiudpsink_remove;
klass->clear = gst_multiudpsink_clear;
#endif
}
+#ifdef G_OS_WIN32
+/* version without sendmsg */
static GstFlowReturn
gst_multiudpsink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
GstMultiUDPSink *sink;
- gint ret, size, num = 0, no_clients = 0;
+ gint ret, num = 0, no_clients = 0;
+ gsize size;
guint8 *data;
GList *clients;
gint len;
sink = GST_MULTIUDPSINK (bsink);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size > UDP_MAX_SIZE) {
GST_WARNING ("Attempting to send a UDP packet larger than maximum "
}
g_mutex_unlock (sink->client_lock);
+ gst_buffer_unmap (buffer, data, size);
+
GST_LOG_OBJECT (sink, "sent %d bytes to %d (of %d) clients", size, num,
no_clients);
return GST_FLOW_OK;
}
-
-#ifndef G_OS_WIN32
+#else /* !G_OS_WIN32 */
+/* version with sendmsg */
static GstFlowReturn
-gst_multiudpsink_render_list (GstBaseSink * bsink, GstBufferList * list)
+gst_multiudpsink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
GstMultiUDPSink *sink;
GList *clients;
gint ret, size = 0, num = 0, no_clients = 0;
struct iovec *iov;
struct msghdr msg = { 0 };
-
- GstBufferListIterator *it;
- guint gsize;
- GstBuffer *buf;
+ guint n_mem, i;
+ gpointer bdata;
+ gsize bsize;
+ GstMemory *mem;
sink = GST_MULTIUDPSINK (bsink);
- g_return_val_if_fail (list != NULL, GST_FLOW_ERROR);
+ msg.msg_iovlen = 0;
+ size = 0;
+
+ n_mem = gst_buffer_n_memory (buffer);
+ if (n_mem == 0)
+ goto no_data;
- it = gst_buffer_list_iterate (list);
- g_return_val_if_fail (it != NULL, GST_FLOW_ERROR);
+ iov = (struct iovec *) g_malloc (n_mem * sizeof (struct iovec));
+ msg.msg_iov = iov;
- while (gst_buffer_list_iterator_next_group (it)) {
- msg.msg_iovlen = 0;
- size = 0;
+ for (i = 0; i < n_mem; i++) {
+ mem = gst_buffer_peek_memory (buffer, i, GST_MAP_READ);
+ bdata = gst_memory_map (mem, &bsize, NULL, GST_MAP_READ);
- if ((gsize = gst_buffer_list_iterator_n_buffers (it)) == 0) {
- goto invalid_list;
+ if (bsize > UDP_MAX_SIZE) {
+ GST_WARNING ("Attempting to send a UDP packet larger than maximum "
+ "size (%d > %d)", bsize, UDP_MAX_SIZE);
}
- iov = (struct iovec *) g_malloc (gsize * sizeof (struct iovec));
- msg.msg_iov = iov;
+ msg.msg_iov[msg.msg_iovlen].iov_len = bsize;
+ msg.msg_iov[msg.msg_iovlen].iov_base = bdata;
+ msg.msg_iovlen++;
- while ((buf = gst_buffer_list_iterator_next (it))) {
- if (GST_BUFFER_SIZE (buf) > UDP_MAX_SIZE) {
- GST_WARNING ("Attempting to send a UDP packet larger than maximum "
- "size (%d > %d)", GST_BUFFER_SIZE (buf), UDP_MAX_SIZE);
- }
+ size += bsize;
+ }
- msg.msg_iov[msg.msg_iovlen].iov_len = GST_BUFFER_SIZE (buf);
- msg.msg_iov[msg.msg_iovlen].iov_base = GST_BUFFER_DATA (buf);
- msg.msg_iovlen++;
- size += GST_BUFFER_SIZE (buf);
- }
+ sink->bytes_to_serve += size;
- sink->bytes_to_serve += size;
+ /* grab lock while iterating and sending to clients, this should be
+ * fast as UDP never blocks */
+ g_mutex_lock (sink->client_lock);
+ GST_LOG_OBJECT (bsink, "about to send %d bytes", size);
- /* grab lock while iterating and sending to clients, this should be
- * fast as UDP never blocks */
- g_mutex_lock (sink->client_lock);
- GST_LOG_OBJECT (bsink, "about to send %d bytes", size);
-
- for (clients = sink->clients; clients; clients = g_list_next (clients)) {
- GstUDPClient *client;
- gint count;
-
- client = (GstUDPClient *) clients->data;
- no_clients++;
- GST_LOG_OBJECT (sink, "sending %d bytes to client %p", size, client);
-
- count = sink->send_duplicates ? client->refcount : 1;
-
- while (count--) {
- while (TRUE) {
- msg.msg_name = (void *) &client->theiraddr;
- msg.msg_namelen = sizeof (client->theiraddr);
- ret = sendmsg (*client->sock, &msg, 0);
-
- if (ret < 0) {
- if (!socket_error_is_ignorable ()) {
- break;
- }
- } else {
- num++;
- client->bytes_sent += ret;
- client->packets_sent++;
- sink->bytes_served += ret;
+ for (clients = sink->clients; clients; clients = g_list_next (clients)) {
+ GstUDPClient *client;
+ gint count;
+
+ client = (GstUDPClient *) clients->data;
+ no_clients++;
+ GST_LOG_OBJECT (sink, "sending %d bytes to client %p", size, client);
+
+ count = sink->send_duplicates ? client->refcount : 1;
+
+ while (count--) {
+ while (TRUE) {
+ msg.msg_name = (void *) &client->theiraddr;
+ msg.msg_namelen = sizeof (client->theiraddr);
+ ret = sendmsg (*client->sock, &msg, 0);
+
+ if (ret < 0) {
+ if (!socket_error_is_ignorable ()) {
+ gchar *errormessage = socket_last_error_message ();
+ GST_WARNING_OBJECT (sink, "client %p gave error %d (%s)", client,
+ socket_last_error_code (), errormessage);
+ g_free (errormessage);
+ break;
break;
}
+ } else {
+ num++;
+ client->bytes_sent += ret;
+ client->packets_sent++;
+ sink->bytes_served += ret;
+ break;
}
}
}
- g_mutex_unlock (sink->client_lock);
+ }
+ g_mutex_unlock (sink->client_lock);
- g_free (iov);
- msg.msg_iov = NULL;
+ /* unmap all memory again */
+ for (i = 0; i < n_mem; i++) {
+ mem = gst_buffer_peek_memory (buffer, i, GST_MAP_READ);
- GST_LOG_OBJECT (sink, "sent %d bytes to %d (of %d) clients", size, num,
- no_clients);
+ bsize = msg.msg_iov[i].iov_len;
+ bdata = msg.msg_iov[i].iov_base;
+
+ gst_memory_unmap (mem, bdata, bsize);
}
+ g_free (iov);
- gst_buffer_list_iterator_free (it);
+ GST_LOG_OBJECT (sink, "sent %d bytes to %d (of %d) clients", size, num,
+ no_clients);
return GST_FLOW_OK;
-invalid_list:
- gst_buffer_list_iterator_free (it);
- return GST_FLOW_ERROR;
+no_data:
+ {
+ return GST_FLOW_OK;
+ }
+}
+#endif
+
+#if 0
+/* DISABLED, core sends buffers to our render one by one, we can't really do
+ * much better */
+static GstFlowReturn
+gst_multiudpsink_render_list (GstBaseSink * bsink, GstBufferList * list)
+{
}
#endif
return FALSE;
#endif
- /* register type of the netbuffer so that we can use it from multiple threads
- * right away. Note that the plugin loading is always serialized */
- gst_netbuffer_get_type ();
+ /* register info of the netaddress metadata so that we can use it from
+ * multiple threads right away. Note that the plugin loading is always
+ * serialized */
+ gst_meta_net_address_get_info ();
if (!gst_element_register (plugin, "udpsink", GST_RANK_NONE,
GST_TYPE_UDPSINK))
/* FILL ME */
};
-static void gst_udpsink_base_init (gpointer g_class);
-static void gst_udpsink_class_init (GstUDPSink * klass);
-static void gst_udpsink_init (GstUDPSink * udpsink);
static void gst_udpsink_finalize (GstUDPSink * udpsink);
static void gst_udpsink_uri_handler_init (gpointer g_iface,
static void gst_udpsink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstElementClass *parent_class = NULL;
-
/*static guint gst_udpsink_signals[LAST_SIGNAL] = { 0 }; */
-
-GType
-gst_udpsink_get_type (void)
-{
- static GType udpsink_type = 0;
-
- if (!udpsink_type) {
- static const GTypeInfo udpsink_info = {
- sizeof (GstUDPSinkClass),
- gst_udpsink_base_init,
- NULL,
- (GClassInitFunc) gst_udpsink_class_init,
- NULL,
- NULL,
- sizeof (GstUDPSink),
- 0,
- (GInstanceInitFunc) gst_udpsink_init,
- NULL
- };
- static const GInterfaceInfo urihandler_info = {
- gst_udpsink_uri_handler_init,
- NULL,
- NULL
- };
-
- udpsink_type =
- g_type_register_static (GST_TYPE_MULTIUDPSINK, "GstUDPSink",
- &udpsink_info, 0);
-
- g_type_add_interface_static (udpsink_type, GST_TYPE_URI_HANDLER,
- &urihandler_info);
-
- }
- return udpsink_type;
-}
-
-static void
-gst_udpsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "UDP packet sender",
- "Sink/Network",
- "Send data over the network via UDP", "Wim Taymans <wim@fluendo.com>");
-}
+#define gst_udpsink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstUDPSink, gst_udpsink, GST_TYPE_MULTIUDPSINK,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_udpsink_uri_handler_init));
static void
-gst_udpsink_class_init (GstUDPSink * klass)
+gst_udpsink_class_init (GstUDPSinkClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
-
- parent_class = g_type_class_peek_parent (klass);
+ gstelement_class = (GstElementClass *) klass;
gobject_class->set_property = gst_udpsink_set_property;
gobject_class->get_property = gst_udpsink_get_property;
g_param_spec_int ("port", "port", "The port to send the packets to",
0, 65535, UDP_DEFAULT_PORT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-}
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP", "Wim Taymans <wim@fluendo.com>");
+}
static void
gst_udpsink_init (GstUDPSink * udpsink)
/*** GSTURIHANDLER INTERFACE *************************************************/
static GstURIType
-gst_udpsink_uri_get_type (void)
+gst_udpsink_uri_get_type (GType type)
{
return GST_URI_SINK;
}
static gchar **
-gst_udpsink_uri_get_protocols (void)
+gst_udpsink_uri_get_protocols (GType type)
{
static gchar *protocols[] = { (char *) "udp", NULL };
static void gst_udpsrc_uri_handler_init (gpointer g_iface, gpointer iface_data);
-static GstCaps *gst_udpsrc_getcaps (GstBaseSrc * src);
+static GstCaps *gst_udpsrc_getcaps (GstBaseSrc * src, GstCaps * filter);
static GstFlowReturn gst_udpsrc_create (GstPushSrc * psrc, GstBuffer ** buf);
static void gst_udpsrc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static void
-_do_init (GType type)
-{
- static const GInterfaceInfo urihandler_info = {
- gst_udpsrc_uri_handler_init,
- NULL,
- NULL
- };
-
- g_type_add_interface_static (type, GST_TYPE_URI_HANDLER, &urihandler_info);
-
- GST_DEBUG_CATEGORY_INIT (udpsrc_debug, "udpsrc", 0, "UDP src");
-}
-
-GST_BOILERPLATE_FULL (GstUDPSrc, gst_udpsrc, GstPushSrc, GST_TYPE_PUSH_SRC,
- _do_init);
-
-static void
-gst_udpsrc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_template));
-
- gst_element_class_set_details_simple (element_class, "UDP packet receiver",
- "Source/Network",
- "Receive data over the network via UDP",
- "Wim Taymans <wim@fluendo.com>, "
- "Thijs Vermeir <thijs.vermeir@barco.com>");
-}
+#define gst_udpsrc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstUDPSrc, gst_udpsrc, GST_TYPE_PUSH_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_udpsrc_uri_handler_init));
static void
gst_udpsrc_class_init (GstUDPSrcClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseSrcClass *gstbasesrc_class;
GstPushSrcClass *gstpushsrc_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasesrc_class = (GstBaseSrcClass *) klass;
gstpushsrc_class = (GstPushSrcClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (udpsrc_debug, "udpsrc", 0, "UDP src");
+
gobject_class->set_property = gst_udpsrc_set_property;
gobject_class->get_property = gst_udpsrc_get_property;
gobject_class->finalize = gst_udpsrc_finalize;
g_param_spec_boolean ("reuse", "Reuse", "Enable reuse of the port",
UDP_DEFAULT_REUSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet receiver",
+ "Source/Network",
+ "Receive data over the network via UDP",
+ "Wim Taymans <wim@fluendo.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>");
+
gstbasesrc_class->start = gst_udpsrc_start;
gstbasesrc_class->stop = gst_udpsrc_stop;
gstbasesrc_class->unlock = gst_udpsrc_unlock;
}
static void
-gst_udpsrc_init (GstUDPSrc * udpsrc, GstUDPSrcClass * g_class)
+gst_udpsrc_init (GstUDPSrc * udpsrc)
{
WSA_STARTUP (udpsrc);
}
static GstCaps *
-gst_udpsrc_getcaps (GstBaseSrc * src)
+gst_udpsrc_getcaps (GstBaseSrc * src, GstCaps * filter)
{
GstUDPSrc *udpsrc;
gst_udpsrc_create (GstPushSrc * psrc, GstBuffer ** buf)
{
GstUDPSrc *udpsrc;
- GstNetBuffer *outbuf;
+ GstMetaNetAddress *meta;
+ GstBuffer *outbuf;
union gst_sockaddr
{
struct sockaddr sa;
socklen_t slen;
guint8 *pktdata;
gint pktsize;
+ gsize offset;
#ifdef G_OS_UNIX
gint readsize;
#elif defined G_OS_WIN32
pktdata = g_malloc (readsize);
pktsize = readsize;
+ offset = 0;
while (TRUE) {
slen = sizeof (sa);
break;
}
- /* special case buffer so receivers can also track the address */
- outbuf = gst_netbuffer_new ();
- GST_BUFFER_MALLOCDATA (outbuf) = pktdata;
-
/* patch pktdata and len when stripping off the headers */
if (G_UNLIKELY (udpsrc->skip_first_bytes != 0)) {
if (G_UNLIKELY (readsize < udpsrc->skip_first_bytes))
goto skip_error;
- pktdata += udpsrc->skip_first_bytes;
+ offset += udpsrc->skip_first_bytes;
ret -= udpsrc->skip_first_bytes;
}
- GST_BUFFER_DATA (outbuf) = pktdata;
- GST_BUFFER_SIZE (outbuf) = ret;
+
+ outbuf = gst_buffer_new ();
+ gst_buffer_take_memory (outbuf, -1,
+ gst_memory_new_wrapped (0, pktdata, g_free, pktsize, offset, ret));
+
+ /* use buffer metadata so receivers can also track the address */
+ meta = gst_buffer_add_meta_net_address (outbuf);
switch (sa.sa.sa_family) {
case AF_INET:
{
- gst_netaddress_set_ip4_address (&outbuf->from, sa.sa_in.sin_addr.s_addr,
+ gst_netaddress_set_ip4_address (&meta->naddr, sa.sa_in.sin_addr.s_addr,
sa.sa_in.sin_port);
}
break;
guint8 ip6[16];
memcpy (ip6, &sa.sa_in6.sin6_addr, sizeof (ip6));
- gst_netaddress_set_ip6_address (&outbuf->from, ip6, sa.sa_in6.sin6_port);
+ gst_netaddress_set_ip6_address (&meta->naddr, ip6, sa.sa_in6.sin6_port);
}
break;
default:
/*** GSTURIHANDLER INTERFACE *************************************************/
static GstURIType
-gst_udpsrc_uri_get_type (void)
+gst_udpsrc_uri_get_type (GType type)
{
return GST_URI_SRC;
}
static gchar **
-gst_udpsrc_uri_get_protocols (void)
+gst_udpsrc_uri_get_protocols (GType type)
{
static gchar *protocols[] = { (char *) "udp", NULL };
#define DEFAULT_PROP_GAMMA 1
static GstStaticPadTemplate gst_gamma_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("NV12") ";"
- GST_VIDEO_CAPS_YUV ("NV21") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";"
- GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("IYUV") ";" GST_VIDEO_CAPS_YUV ("Y41B")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+        "ARGB, BGRA, ABGR, RGBA, Y444, "
+ "xRGB, RGBx, xBGR, BGRx, RGB, BGR, Y42B, NV12, "
+ "NV21, YUY2, UYVY, YVYU, I420, YV12, IYUV, Y41B }"))
);
static GstStaticPadTemplate gst_gamma_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("NV12") ";"
- GST_VIDEO_CAPS_YUV ("NV21") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";"
- GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("IYUV") ";" GST_VIDEO_CAPS_YUV ("Y41B")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+        "ARGB, BGRA, ABGR, RGBA, Y444, "
+ "xRGB, RGBx, xBGR, BGRx, RGB, BGR, Y42B, NV12, "
+ "NV21, YUY2, UYVY, YVYU, I420, YV12, IYUV, Y41B }"))
);
static void gst_gamma_set_property (GObject * object, guint prop_id,
static void gst_gamma_calculate_tables (GstGamma * gamma);
-GST_BOILERPLATE (GstGamma, gst_gamma, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
-
-static void
-gst_gamma_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video gamma correction",
- "Filter/Effect/Video",
- "Adjusts gamma on a video stream",
- "Arwed v. Merkatz <v.merkatz@gmx.net>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_gamma_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_gamma_src_template));
-}
+G_DEFINE_TYPE (GstGamma, gst_gamma, GST_TYPE_VIDEO_FILTER);
static void
gst_gamma_class_init (GstGammaClass * g_class)
{
GObjectClass *gobject_class = (GObjectClass *) g_class;
+ GstElementClass *gstelement_class = (GstElementClass *) g_class;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) g_class;
GST_DEBUG_CATEGORY_INIT (gamma_debug, "gamma", 0, "gamma");
0.01, 10, DEFAULT_PROP_GAMMA,
GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS | G_PARAM_READWRITE));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Video gamma correction", "Filter/Effect/Video",
+      "Adjusts gamma on a video stream", "Arwed v. Merkatz <v.merkatz@gmx.net>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_gamma_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_gamma_src_template));
+
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_gamma_set_caps);
trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_gamma_transform_ip);
trans_class->before_transform =
}
static void
-gst_gamma_init (GstGamma * gamma, GstGammaClass * g_class)
+gst_gamma_init (GstGamma * gamma)
{
/* properties */
gamma->gamma = DEFAULT_PROP_GAMMA;
}
static void
-gst_gamma_planar_yuv_ip (GstGamma * gamma, guint8 * data)
+gst_gamma_planar_yuv_ip (GstGamma * gamma, GstVideoFrame * frame)
{
gint i, j, height;
- gint width, row_stride, row_wrap;
+ gint width, stride, row_wrap;
const guint8 *table = gamma->gamma_table;
+ guint8 *data;
- data =
- data + gst_video_format_get_component_offset (gamma->format, 0,
- gamma->width, gamma->height);
-
- width = gst_video_format_get_component_width (gamma->format, 0, gamma->width);
- height = gst_video_format_get_component_height (gamma->format, 0,
- gamma->height);
- row_stride = gst_video_format_get_row_stride (gamma->format, 0, gamma->width);
- row_wrap = row_stride - width;
+ data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
+ row_wrap = stride - width;
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
}
static void
-gst_gamma_packed_yuv_ip (GstGamma * gamma, guint8 * data)
+gst_gamma_packed_yuv_ip (GstGamma * gamma, GstVideoFrame * frame)
{
gint i, j, height;
- gint width, row_stride, row_wrap;
+ gint width, stride, row_wrap;
gint pixel_stride;
const guint8 *table = gamma->gamma_table;
+ guint8 *data;
- data = data + gst_video_format_get_component_offset (gamma->format, 0,
- gamma->width, gamma->height);
-
- width = gst_video_format_get_component_width (gamma->format, 0, gamma->width);
- height = gst_video_format_get_component_height (gamma->format, 0,
- gamma->height);
- row_stride = gst_video_format_get_row_stride (gamma->format, 0, gamma->width);
- pixel_stride = gst_video_format_get_pixel_stride (gamma->format, 0);
- row_wrap = row_stride - pixel_stride * width;
+ data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ row_wrap = stride - pixel_stride * width;
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
static void
-gst_gamma_packed_rgb_ip (GstGamma * gamma, guint8 * data)
+gst_gamma_packed_rgb_ip (GstGamma * gamma, GstVideoFrame * frame)
{
gint i, j, height;
- gint width, row_stride, row_wrap;
+ gint width, stride, row_wrap;
gint pixel_stride;
const guint8 *table = gamma->gamma_table;
gint offsets[3];
gint r, g, b;
gint y, u, v;
+ guint8 *data;
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
- offsets[0] = gst_video_format_get_component_offset (gamma->format, 0,
- gamma->width, gamma->height);
- offsets[1] = gst_video_format_get_component_offset (gamma->format, 1,
- gamma->width, gamma->height);
- offsets[2] = gst_video_format_get_component_offset (gamma->format, 2,
- gamma->width, gamma->height);
+ offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
+ offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
+ offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
- width = gst_video_format_get_component_width (gamma->format, 0, gamma->width);
- height = gst_video_format_get_component_height (gamma->format, 0,
- gamma->height);
- row_stride = gst_video_format_get_row_stride (gamma->format, 0, gamma->width);
- pixel_stride = gst_video_format_get_pixel_stride (gamma->format, 0);
- row_wrap = row_stride - pixel_stride * width;
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ row_wrap = stride - pixel_stride * width;
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
GstCaps * outcaps)
{
GstGamma *gamma = GST_GAMMA (base);
+ GstVideoInfo info;
GST_DEBUG_OBJECT (gamma,
"setting caps: in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps,
outcaps);
- if (!gst_video_format_parse_caps (incaps, &gamma->format, &gamma->width,
- &gamma->height))
+ if (!gst_video_info_from_caps (&info, incaps))
goto invalid_caps;
- gamma->size =
- gst_video_format_get_size (gamma->format, gamma->width, gamma->height);
+ gamma->info = info;
- switch (gamma->format) {
+ switch (GST_VIDEO_INFO_FORMAT (&info)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y41B:
return TRUE;
invalid_caps:
- GST_ERROR_OBJECT (gamma, "Invalid caps: %" GST_PTR_FORMAT, incaps);
- return FALSE;
+ {
+ GST_ERROR_OBJECT (gamma, "Invalid caps: %" GST_PTR_FORMAT, incaps);
+ return FALSE;
+ }
}
static void
gst_gamma_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
{
GstGamma *gamma = GST_GAMMA (base);
- guint8 *data;
- guint size;
+ GstVideoFrame frame;
if (!gamma->process)
goto not_negotiated;
if (base->passthrough)
goto done;
- data = GST_BUFFER_DATA (outbuf);
- size = GST_BUFFER_SIZE (outbuf);
-
- if (size != gamma->size)
- goto wrong_size;
+ if (!gst_video_frame_map (&frame, &gamma->info, outbuf, GST_MAP_READWRITE))
+ goto wrong_buffer;
GST_OBJECT_LOCK (gamma);
- gamma->process (gamma, data);
+ gamma->process (gamma, &frame);
GST_OBJECT_UNLOCK (gamma);
+ gst_video_frame_unmap (&frame);
+
done:
return GST_FLOW_OK;
/* ERRORS */
-wrong_size:
+wrong_buffer:
{
GST_ELEMENT_ERROR (gamma, STREAM, FORMAT,
- (NULL), ("Invalid buffer size %d, expected %d", size, gamma->size));
+ (NULL), ("Invalid buffer received"));
return GST_FLOW_ERROR;
}
not_negotiated:
/* < private > */
/* format */
- GstVideoFormat format;
- gint width;
- gint height;
- gint size;
+ GstVideoInfo info;
/* properties */
gdouble gamma;
/* tables */
guint8 gamma_table[256];
- void (*process) (GstGamma *gamma, guint8 *data);
+ void (*process) (GstGamma *gamma, GstVideoFrame *frame);
};
struct _GstGammaClass
};
static GstStaticPadTemplate gst_video_balance_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";"
- GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("IYUV") ";" GST_VIDEO_CAPS_YUV ("Y41B")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, "
+ "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, "
+ "I420, YV12, IYUV, Y41B }"))
);
static GstStaticPadTemplate gst_video_balance_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("Y42B") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";"
- GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU") ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";"
- GST_VIDEO_CAPS_YUV ("IYUV") ";" GST_VIDEO_CAPS_YUV ("Y41B")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, "
+ "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, "
+ "I420, YV12, IYUV, Y41B }"))
);
static void gst_video_balance_colorbalance_init (GstColorBalanceClass * iface);
-static void gst_video_balance_interface_init (GstImplementsInterfaceClass *
- klass);
static void gst_video_balance_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_video_balance_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static void
-_do_init (GType video_balance_type)
-{
- static const GInterfaceInfo iface_info = {
- (GInterfaceInitFunc) gst_video_balance_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo colorbalance_info = {
- (GInterfaceInitFunc) gst_video_balance_colorbalance_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (video_balance_type,
- GST_TYPE_IMPLEMENTS_INTERFACE, &iface_info);
- g_type_add_interface_static (video_balance_type, GST_TYPE_COLOR_BALANCE,
- &colorbalance_info);
-}
-
-GST_BOILERPLATE_FULL (GstVideoBalance, gst_video_balance, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER, _do_init);
+#define gst_video_balance_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstVideoBalance, gst_video_balance,
+ GST_TYPE_VIDEO_FILTER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
+ gst_video_balance_colorbalance_init));
/*
* look-up tables (LUT).
}
static void
-gst_video_balance_planar_yuv (GstVideoBalance * videobalance, guint8 * data)
+gst_video_balance_planar_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
{
gint x, y;
guint8 *ydata;
guint8 *udata, *vdata;
gint ystride, ustride, vstride;
- GstVideoFormat format;
gint width, height;
gint width2, height2;
guint8 *tabley = videobalance->tabley;
guint8 **tableu = videobalance->tableu;
guint8 **tablev = videobalance->tablev;
- format = videobalance->format;
- width = videobalance->width;
- height = videobalance->height;
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
- ydata =
- data + gst_video_format_get_component_offset (format, 0, width, height);
- ystride = gst_video_format_get_row_stride (format, 0, width);
+ ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
for (y = 0; y < height; y++) {
guint8 *yptr;
}
}
- width2 = gst_video_format_get_component_width (format, 1, width);
- height2 = gst_video_format_get_component_height (format, 1, height);
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
- udata =
- data + gst_video_format_get_component_offset (format, 1, width, height);
- vdata =
- data + gst_video_format_get_component_offset (format, 2, width, height);
- ustride = gst_video_format_get_row_stride (format, 1, width);
- vstride = gst_video_format_get_row_stride (format, 1, width);
+ udata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
+ vdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 2);
+ ustride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
+ vstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 2);
for (y = 0; y < height2; y++) {
guint8 *uptr, *vptr;
}
static void
-gst_video_balance_packed_yuv (GstVideoBalance * videobalance, guint8 * data)
+gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
{
- gint x, y;
- guint8 *ydata;
- guint8 *udata, *vdata;
- gint ystride, ustride, vstride;
+ gint x, y, stride;
+ guint8 *ydata, *udata, *vdata;
gint yoff, uoff, voff;
- GstVideoFormat format;
gint width, height;
gint width2, height2;
guint8 *tabley = videobalance->tabley;
guint8 **tableu = videobalance->tableu;
guint8 **tablev = videobalance->tablev;
- format = videobalance->format;
- width = videobalance->width;
- height = videobalance->height;
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
- ydata =
- data + gst_video_format_get_component_offset (format, 0, width, height);
- ystride = gst_video_format_get_row_stride (format, 0, width);
- yoff = gst_video_format_get_pixel_stride (format, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+ ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
for (y = 0; y < height; y++) {
guint8 *yptr;
- yptr = ydata + y * ystride;
+ yptr = ydata + y * stride;
for (x = 0; x < width; x++) {
*yptr = tabley[*yptr];
yptr += yoff;
}
}
- width2 = gst_video_format_get_component_width (format, 1, width);
- height2 = gst_video_format_get_component_height (format, 1, height);
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
- udata =
- data + gst_video_format_get_component_offset (format, 1, width, height);
- vdata =
- data + gst_video_format_get_component_offset (format, 2, width, height);
- ustride = gst_video_format_get_row_stride (format, 1, width);
- vstride = gst_video_format_get_row_stride (format, 1, width);
- uoff = gst_video_format_get_pixel_stride (format, 1);
- voff = gst_video_format_get_pixel_stride (format, 2);
+ udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
+ vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
+ uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
+ voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);
for (y = 0; y < height2; y++) {
guint8 *uptr, *vptr;
guint8 u1, v1;
- uptr = udata + y * ustride;
- vptr = vdata + y * vstride;
+ uptr = udata + y * stride;
+ vptr = vdata + y * stride;
for (x = 0; x < width2; x++) {
u1 = *uptr;
#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
static void
-gst_video_balance_packed_rgb (GstVideoBalance * videobalance, guint8 * data)
+gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
{
gint i, j, height;
- gint width, row_stride, row_wrap;
+ gint width, stride, row_wrap;
gint pixel_stride;
+ guint8 *data;
gint offsets[3];
gint r, g, b;
gint y, u, v;
guint8 **tableu = videobalance->tableu;
guint8 **tablev = videobalance->tablev;
- offsets[0] = gst_video_format_get_component_offset (videobalance->format, 0,
- videobalance->width, videobalance->height);
- offsets[1] = gst_video_format_get_component_offset (videobalance->format, 1,
- videobalance->width, videobalance->height);
- offsets[2] = gst_video_format_get_component_offset (videobalance->format, 2,
- videobalance->width, videobalance->height);
-
- width =
- gst_video_format_get_component_width (videobalance->format, 0,
- videobalance->width);
- height =
- gst_video_format_get_component_height (videobalance->format, 0,
- videobalance->height);
- row_stride =
- gst_video_format_get_row_stride (videobalance->format, 0,
- videobalance->width);
- pixel_stride = gst_video_format_get_pixel_stride (videobalance->format, 0);
- row_wrap = row_stride - pixel_stride * width;
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
+ offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
+ offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ row_wrap = stride - pixel_stride * width;
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
GstCaps * outcaps)
{
GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);
+ GstVideoInfo info;
GST_DEBUG_OBJECT (videobalance,
"in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);
videobalance->process = NULL;
- if (!gst_video_format_parse_caps (incaps, &videobalance->format,
- &videobalance->width, &videobalance->height))
+ if (!gst_video_info_from_caps (&info, incaps))
goto invalid_caps;
- videobalance->size =
- gst_video_format_get_size (videobalance->format, videobalance->width,
- videobalance->height);
-
- switch (videobalance->format) {
+ switch (GST_VIDEO_INFO_FORMAT (&info)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y41B:
videobalance->process = gst_video_balance_packed_rgb;
break;
default:
+ goto unknown_format;
break;
}
- return videobalance->process != NULL;
+ videobalance->info = info;
+
+ return TRUE;
invalid_caps:
- GST_ERROR_OBJECT (videobalance, "Invalid caps: %" GST_PTR_FORMAT, incaps);
- return FALSE;
+ {
+ GST_ERROR_OBJECT (videobalance, "Invalid caps: %" GST_PTR_FORMAT, incaps);
+ return FALSE;
+ }
+unknown_format:
+ {
+ GST_ERROR_OBJECT (videobalance, "unknown format %" GST_PTR_FORMAT, incaps);
+ return FALSE;
+ }
}
static void
gst_video_balance_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
{
GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);
- guint8 *data;
- guint size;
+ GstVideoFrame frame;
if (!videobalance->process)
goto not_negotiated;
if (base->passthrough)
goto done;
- data = GST_BUFFER_DATA (outbuf);
- size = GST_BUFFER_SIZE (outbuf);
-
- if (size != videobalance->size)
- goto wrong_size;
+ if (!gst_video_frame_map (&frame, &videobalance->info, outbuf,
+ GST_MAP_READWRITE))
+ goto wrong_frame;
GST_OBJECT_LOCK (videobalance);
- videobalance->process (videobalance, data);
+ videobalance->process (videobalance, &frame);
GST_OBJECT_UNLOCK (videobalance);
+ gst_video_frame_unmap (&frame);
+
done:
return GST_FLOW_OK;
/* ERRORS */
-wrong_size:
+wrong_frame:
{
GST_ELEMENT_ERROR (videobalance, STREAM, FORMAT,
- (NULL), ("Invalid buffer size %d, expected %d", size,
- videobalance->size));
+ (NULL), ("Invalid buffer received"));
return GST_FLOW_ERROR;
}
not_negotiated:
- GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
- return GST_FLOW_NOT_NEGOTIATED;
-}
-
-static void
-gst_video_balance_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video balance",
- "Filter/Effect/Video",
- "Adjusts brightness, contrast, hue, saturation on a video stream",
- "David Schleef <ds@schleef.org>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_video_balance_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_video_balance_src_template));
+ {
+ GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
}
static void
gst_video_balance_class_init (GstVideoBalanceClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0,
DEFAULT_PROP_SATURATION,
GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Video balance",
+ "Filter/Effect/Video",
+ "Adjusts brightness, contrast, hue, saturation on a video stream",
+ "David Schleef <ds@schleef.org>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_balance_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_balance_src_template));
+
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_balance_set_caps);
trans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_video_balance_transform_ip);
}
static void
-gst_video_balance_init (GstVideoBalance * videobalance,
- GstVideoBalanceClass * klass)
+gst_video_balance_init (GstVideoBalance * videobalance)
{
const gchar *channels[4] = { "HUE", "SATURATION",
"BRIGHTNESS", "CONTRAST"
}
}
-static gboolean
-gst_video_balance_interface_supported (GstImplementsInterface * iface,
- GType type)
-{
- g_assert (type == GST_TYPE_COLOR_BALANCE);
- return TRUE;
-}
-
-static void
-gst_video_balance_interface_init (GstImplementsInterfaceClass * klass)
-{
- klass->supported = gst_video_balance_interface_supported;
-}
-
static const GList *
gst_video_balance_colorbalance_list_channels (GstColorBalance * balance)
{
gdouble saturation;
/* format */
- GstVideoFormat format;
- gint width;
- gint height;
- gint size;
+ GstVideoInfo info;
/* tables */
guint8 tabley[256];
guint8 *tableu[256];
guint8 *tablev[256];
- void (*process) (GstVideoBalance *balance, guint8 *data);
+ void (*process) (GstVideoBalance *balance, GstVideoFrame *frame);
};
struct _GstVideoBalanceClass {
#define GST_CAT_DEFAULT video_flip_debug
static GstStaticPadTemplate gst_video_flip_src_template =
- GST_STATIC_PAD_TEMPLATE ("src",
+GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";" GST_VIDEO_CAPS_YUV ("IYUV") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";" GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU")
-
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+        "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, xBGR, BGRx, "
+ "RGB, BGR, I420, YV12, IYUV, YUY2, UYVY, YVYU }"))
);
static GstStaticPadTemplate gst_video_flip_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
+GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
- GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
- GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";"
- GST_VIDEO_CAPS_YUV ("Y444") ";"
- GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGBx ";"
- GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_BGRx ";"
- GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";"
- GST_VIDEO_CAPS_YUV ("I420") ";"
- GST_VIDEO_CAPS_YUV ("YV12") ";" GST_VIDEO_CAPS_YUV ("IYUV") ";"
- GST_VIDEO_CAPS_YUV ("YUY2") ";" GST_VIDEO_CAPS_YUV ("UYVY") ";"
- GST_VIDEO_CAPS_YUV ("YVYU")
- )
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+        "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, xBGR, BGRx, "
+ "RGB, BGR, I420, YV12, IYUV, YUY2, UYVY, YVYU }"))
);
#define GST_TYPE_VIDEO_FLIP_METHOD (gst_video_flip_method_get_type())
return video_flip_method_type;
}
-GST_BOILERPLATE (GstVideoFlip, gst_video_flip, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+#define gst_video_flip_parent_class parent_class
+G_DEFINE_TYPE (GstVideoFlip, gst_video_flip, GST_TYPE_VIDEO_FILTER);
static GstCaps *
gst_video_flip_transform_caps (GstBaseTransform * trans,
- GstPadDirection direction, GstCaps * caps)
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans);
GstCaps *ret;
GST_DEBUG_OBJECT (videoflip, "transformed %" GST_PTR_FORMAT " to %"
GST_PTR_FORMAT, caps, ret);
+ if (filter) {
+ GstCaps *intersection;
+
+ GST_DEBUG_OBJECT (videoflip, "Using filter caps %" GST_PTR_FORMAT, filter);
+ intersection =
+ gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (ret);
+ ret = intersection;
+ GST_DEBUG_OBJECT (videoflip, "Intersection %" GST_PTR_FORMAT, ret);
+ }
+
return ret;
}
static gboolean
gst_video_flip_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
- guint * size)
+ gsize * size)
{
- GstVideoFormat format;
- gint width, height;
+ GstVideoInfo info;
- if (!gst_video_format_parse_caps (caps, &format, &width, &height))
+ if (!gst_video_info_from_caps (&info, caps))
return FALSE;
- *size = gst_video_format_get_size (format, width, height);
+ *size = info.size;
GST_DEBUG_OBJECT (btrans, "our frame size is %d bytes (%dx%d)", *size,
- width, height);
+ info.width, info.height);
return TRUE;
}
static void
-gst_video_flip_planar_yuv (GstVideoFlip * videoflip, guint8 * dest,
- const guint8 * src)
+gst_video_flip_planar_yuv (GstVideoFlip * videoflip, GstVideoFrame * dest,
+ const GstVideoFrame * src)
{
gint x, y;
guint8 const *s;
guint8 *d;
- GstVideoFormat format = videoflip->format;
- gint sw = videoflip->from_width;
- gint sh = videoflip->from_height;
- gint dw = videoflip->to_width;
- gint dh = videoflip->to_height;
gint src_y_stride, src_u_stride, src_v_stride;
- gint src_y_offset, src_u_offset, src_v_offset;
gint src_y_height, src_u_height, src_v_height;
gint src_y_width, src_u_width, src_v_width;
gint dest_y_stride, dest_u_stride, dest_v_stride;
- gint dest_y_offset, dest_u_offset, dest_v_offset;
gint dest_y_height, dest_u_height, dest_v_height;
gint dest_y_width, dest_u_width, dest_v_width;
- src_y_stride = gst_video_format_get_row_stride (format, 0, sw);
- src_u_stride = gst_video_format_get_row_stride (format, 1, sw);
- src_v_stride = gst_video_format_get_row_stride (format, 2, sw);
-
- dest_y_stride = gst_video_format_get_row_stride (format, 0, dw);
- dest_u_stride = gst_video_format_get_row_stride (format, 1, dw);
- dest_v_stride = gst_video_format_get_row_stride (format, 2, dw);
+ src_y_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
+ src_u_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 1);
+ src_v_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 2);
- src_y_offset = gst_video_format_get_component_offset (format, 0, sw, sh);
- src_u_offset = gst_video_format_get_component_offset (format, 1, sw, sh);
- src_v_offset = gst_video_format_get_component_offset (format, 2, sw, sh);
+ dest_y_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 0);
+ dest_u_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 1);
+ dest_v_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 2);
- dest_y_offset = gst_video_format_get_component_offset (format, 0, dw, dh);
- dest_u_offset = gst_video_format_get_component_offset (format, 1, dw, dh);
- dest_v_offset = gst_video_format_get_component_offset (format, 2, dw, dh);
+ src_y_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 0);
+ src_u_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 1);
+ src_v_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 2);
- src_y_width = gst_video_format_get_component_width (format, 0, sw);
- src_u_width = gst_video_format_get_component_width (format, 1, sw);
- src_v_width = gst_video_format_get_component_width (format, 2, sw);
+ dest_y_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 0);
+ dest_u_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 1);
+ dest_v_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 2);
- dest_y_width = gst_video_format_get_component_width (format, 0, dw);
- dest_u_width = gst_video_format_get_component_width (format, 1, dw);
- dest_v_width = gst_video_format_get_component_width (format, 2, dw);
+ src_y_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 0);
+ src_u_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 1);
+ src_v_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 2);
- src_y_height = gst_video_format_get_component_height (format, 0, sh);
- src_u_height = gst_video_format_get_component_height (format, 1, sh);
- src_v_height = gst_video_format_get_component_height (format, 2, sh);
-
- dest_y_height = gst_video_format_get_component_height (format, 0, dh);
- dest_u_height = gst_video_format_get_component_height (format, 1, dh);
- dest_v_height = gst_video_format_get_component_height (format, 2, dh);
+ dest_y_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 0);
+ dest_u_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 1);
+ dest_v_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 2);
switch (videoflip->method) {
case GST_VIDEO_FLIP_METHOD_90R:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
break;
case GST_VIDEO_FLIP_METHOD_90L:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
break;
case GST_VIDEO_FLIP_METHOD_180:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
break;
case GST_VIDEO_FLIP_METHOD_HORIZ:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
break;
case GST_VIDEO_FLIP_METHOD_VERT:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
break;
case GST_VIDEO_FLIP_METHOD_TRANS:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] = s[x * src_y_stride + y];
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] = s[x * src_u_stride + y];
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_v_stride + x] = s[x * src_v_stride + y];
break;
case GST_VIDEO_FLIP_METHOD_OTHER:
/* Flip Y */
- s = src + src_y_offset;
- d = dest + dest_y_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
for (y = 0; y < dest_y_height; y++) {
for (x = 0; x < dest_y_width; x++) {
d[y * dest_y_stride + x] =
}
}
/* Flip U */
- s = src + src_u_offset;
- d = dest + dest_u_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
for (y = 0; y < dest_u_height; y++) {
for (x = 0; x < dest_u_width; x++) {
d[y * dest_u_stride + x] =
}
}
/* Flip V */
- s = src + src_v_offset;
- d = dest + dest_v_offset;
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
for (y = 0; y < dest_v_height; y++) {
for (x = 0; x < dest_v_width; x++) {
d[y * dest_v_stride + x] =
}
static void
-gst_video_flip_packed_simple (GstVideoFlip * videoflip, guint8 * dest,
- const guint8 * src)
+gst_video_flip_packed_simple (GstVideoFlip * videoflip, GstVideoFrame * dest,
+ const GstVideoFrame * src)
{
gint x, y, z;
- guint8 const *s = src;
- guint8 *d = dest;
- GstVideoFormat format = videoflip->format;
- gint sw = videoflip->from_width;
- gint sh = videoflip->from_height;
- gint dw = videoflip->to_width;
- gint dh = videoflip->to_height;
+ guint8 const *s;
+ guint8 *d;
+ gint sw = GST_VIDEO_FRAME_WIDTH (src);
+ gint sh = GST_VIDEO_FRAME_HEIGHT (src);
+ gint dw = GST_VIDEO_FRAME_WIDTH (dest);
+ gint dh = GST_VIDEO_FRAME_HEIGHT (dest);
gint src_stride, dest_stride;
gint bpp;
- src_stride = gst_video_format_get_row_stride (format, 0, sw);
- dest_stride = gst_video_format_get_row_stride (format, 0, dw);
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+
+ src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
+ dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 0);
/* This is only true for non-subsampled formats! */
- bpp = gst_video_format_get_pixel_stride (format, 0);
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (src, 0);
switch (videoflip->method) {
case GST_VIDEO_FLIP_METHOD_90R:
static void
-gst_video_flip_y422 (GstVideoFlip * videoflip, guint8 * dest,
- const guint8 * src)
+gst_video_flip_y422 (GstVideoFlip * videoflip, GstVideoFrame * dest,
+ const GstVideoFrame * src)
{
gint x, y;
- guint8 const *s = src;
- guint8 *d = dest;
- GstVideoFormat format = videoflip->format;
- gint sw = videoflip->from_width;
- gint sh = videoflip->from_height;
- gint dw = videoflip->to_width;
- gint dh = videoflip->to_height;
+ guint8 const *s;
+ guint8 *d;
+ gint sw = GST_VIDEO_FRAME_WIDTH (src);
+ gint sh = GST_VIDEO_FRAME_HEIGHT (src);
+ gint dw = GST_VIDEO_FRAME_WIDTH (dest);
+ gint dh = GST_VIDEO_FRAME_HEIGHT (dest);
gint src_stride, dest_stride;
gint bpp;
gint y_offset;
gint v_offset;
gint y_stride;
- src_stride = gst_video_format_get_row_stride (format, 0, sw);
- dest_stride = gst_video_format_get_row_stride (format, 0, dw);
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
- y_offset = gst_video_format_get_component_offset (format, 0, sw, sh);
- u_offset = gst_video_format_get_component_offset (format, 1, sw, sh);
- v_offset = gst_video_format_get_component_offset (format, 2, sw, sh);
- y_stride = gst_video_format_get_pixel_stride (format, 0);
+ src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
+ dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 0);
+
+ y_offset = GST_VIDEO_FRAME_COMP_OFFSET (src, 0);
+ u_offset = GST_VIDEO_FRAME_COMP_OFFSET (src, 1);
+ v_offset = GST_VIDEO_FRAME_COMP_OFFSET (src, 2);
+ y_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (src, 0);
bpp = y_stride;
switch (videoflip->method) {
GstCaps * outcaps)
{
GstVideoFlip *vf = GST_VIDEO_FLIP (btrans);
- GstVideoFormat in_format, out_format;
+ GstVideoInfo in_info, out_info;
gboolean ret = FALSE;
vf->process = NULL;
- if (!gst_video_format_parse_caps (incaps, &in_format, &vf->from_width,
- &vf->from_height)
- || !gst_video_format_parse_caps (outcaps, &out_format, &vf->to_width,
- &vf->to_height))
+ if (!gst_video_info_from_caps (&in_info, incaps)
+ || !gst_video_info_from_caps (&out_info, outcaps))
goto invalid_caps;
- if (in_format != out_format)
+ if (GST_VIDEO_INFO_FORMAT (&in_info) != GST_VIDEO_INFO_FORMAT (&out_info))
goto invalid_caps;
- vf->format = in_format;
/* Check that they are correct */
switch (vf->method) {
case GST_VIDEO_FLIP_METHOD_90L:
case GST_VIDEO_FLIP_METHOD_TRANS:
case GST_VIDEO_FLIP_METHOD_OTHER:
- if ((vf->from_width != vf->to_height) ||
- (vf->from_height != vf->to_width)) {
+ if ((in_info.width != out_info.height) ||
+ (in_info.height != out_info.width)) {
GST_ERROR_OBJECT (vf, "we are inverting width and height but caps "
- "are not correct : %dx%d to %dx%d", vf->from_width,
- vf->from_height, vf->to_width, vf->to_height);
+ "are not correct : %dx%d to %dx%d", in_info.width,
+ in_info.height, out_info.width, out_info.height);
goto beach;
}
break;
case GST_VIDEO_FLIP_METHOD_180:
case GST_VIDEO_FLIP_METHOD_HORIZ:
case GST_VIDEO_FLIP_METHOD_VERT:
- if ((vf->from_width != vf->to_width) ||
- (vf->from_height != vf->to_height)) {
+ if ((in_info.width != out_info.width) ||
+ (in_info.height != out_info.height)) {
GST_ERROR_OBJECT (vf, "we are keeping width and height but caps "
- "are not correct : %dx%d to %dx%d", vf->from_width,
- vf->from_height, vf->to_width, vf->to_height);
+ "are not correct : %dx%d to %dx%d", in_info.width,
+ in_info.height, out_info.width, out_info.height);
goto beach;
}
break;
}
ret = TRUE;
+ vf->in_info = in_info;
+ vf->out_info = out_info;
- switch (vf->format) {
+ switch (GST_VIDEO_INFO_FORMAT (&in_info)) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y444:
GstBuffer * out)
{
GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans);
- guint8 *dest;
- const guint8 *src;
+ GstVideoFrame dest;
+ GstVideoFrame src;
if (G_UNLIKELY (videoflip->process == NULL))
goto not_negotiated;
- src = GST_BUFFER_DATA (in);
- dest = GST_BUFFER_DATA (out);
+ if (!gst_video_frame_map (&src, &videoflip->in_info, in, GST_MAP_READ))
+ goto invalid_in;
+
+ if (!gst_video_frame_map (&dest, &videoflip->out_info, out, GST_MAP_WRITE))
+ goto invalid_out;
GST_LOG_OBJECT (videoflip, "videoflip: flipping %dx%d to %dx%d (%s)",
- videoflip->from_width, videoflip->from_height, videoflip->to_width,
- videoflip->to_height, video_flip_methods[videoflip->method].value_nick);
+ videoflip->in_info.width, videoflip->in_info.height,
+ videoflip->out_info.width, videoflip->out_info.height,
+ video_flip_methods[videoflip->method].value_nick);
GST_OBJECT_LOCK (videoflip);
- videoflip->process (videoflip, dest, src);
+ videoflip->process (videoflip, &dest, &src);
GST_OBJECT_UNLOCK (videoflip);
+ gst_video_frame_unmap (&src);
+ gst_video_frame_unmap (&dest);
+
return GST_FLOW_OK;
not_negotiated:
- GST_ERROR_OBJECT (videoflip, "Not negotiated yet");
- return GST_FLOW_NOT_NEGOTIATED;
+ {
+ GST_ERROR_OBJECT (videoflip, "Not negotiated yet");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+invalid_in:
+ {
+ GST_ERROR_OBJECT (videoflip, "invalid input frame");
+ return GST_FLOW_OK;
+ }
+invalid_out:
+ {
+ GST_ERROR_OBJECT (videoflip, "invalid output frame");
+ gst_video_frame_unmap (&src);
+ return GST_FLOW_OK;
+ }
}
static gboolean
switch (vf->method) {
case GST_VIDEO_FLIP_METHOD_90R:
new_x = y;
- new_y = vf->to_width - x;
+ new_y = vf->out_info.width - x;
break;
case GST_VIDEO_FLIP_METHOD_90L:
- new_x = vf->to_height - y;
+ new_x = vf->out_info.height - y;
new_y = x;
break;
case GST_VIDEO_FLIP_METHOD_OTHER:
- new_x = vf->to_height - y;
- new_y = vf->to_width - x;
+ new_x = vf->out_info.height - y;
+ new_y = vf->out_info.width - x;
break;
case GST_VIDEO_FLIP_METHOD_TRANS:
new_x = y;
new_y = x;
break;
case GST_VIDEO_FLIP_METHOD_180:
- new_x = vf->to_width - x;
- new_y = vf->to_height - y;
+ new_x = vf->out_info.width - x;
+ new_y = vf->out_info.height - y;
break;
case GST_VIDEO_FLIP_METHOD_HORIZ:
- new_x = vf->to_width - x;
+ new_x = vf->out_info.width - x;
new_y = y;
break;
case GST_VIDEO_FLIP_METHOD_VERT:
new_x = x;
- new_y = vf->to_height - y;
+ new_y = vf->out_info.height - y;
break;
default:
new_x = x;
}
static void
-gst_video_flip_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video flipper",
- "Filter/Effect/Video",
- "Flips and rotates video", "David Schleef <ds@schleef.org>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_video_flip_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_video_flip_src_template));
-}
-
-static void
gst_video_flip_class_init (GstVideoFlipClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GST_DEBUG_CATEGORY_INIT (video_flip_debug, "videoflip", 0, "videoflip");
GST_TYPE_VIDEO_FLIP_METHOD, PROP_METHOD_DEFAULT,
GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Video flipper",
+ "Filter/Effect/Video",
+ "Flips and rotates video", "David Schleef <ds@schleef.org>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_flip_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_flip_src_template));
+
trans_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_video_flip_transform_caps);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_flip_set_caps);
}
static void
-gst_video_flip_init (GstVideoFlip * videoflip, GstVideoFlipClass * klass)
+gst_video_flip_init (GstVideoFlip * videoflip)
{
videoflip->method = PROP_METHOD_DEFAULT;
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (videoflip), TRUE);
GstVideoFilter videofilter;
/* < private > */
- GstVideoFormat format;
- gint from_width, from_height;
- gint to_width, to_height;
+ GstVideoInfo in_info;
+ GstVideoInfo out_info;
GstVideoFlipMethod method;
- void (*process) (GstVideoFlip *videoflip, guint8 *dest, const guint8 *src);
+ void (*process) (GstVideoFlip *videoflip, GstVideoFrame *dest, const GstVideoFrame *src);
};
struct _GstVideoFlipClass {
/* Do something only on a change and if not locked */
if (!GST_COLLECT_PADS2_STATE_IS_SET (data, GST_COLLECT_PADS2_STATE_LOCKED) &&
(GST_COLLECT_PADS2_STATE_IS_SET (data, GST_COLLECT_PADS2_STATE_WAITING) !=
- ! !waiting)) {
+ !!waiting)) {
/* Set waiting state for this pad */
if (waiting)
GST_COLLECT_PADS2_STATE_SET (data, GST_COLLECT_PADS2_STATE_WAITING);
EXTRA_DIST = \
a52.m4 \
aalib.m4 \
- as-arts.m4 \
as-ffmpeg.m4 \
as-liblame.m4 \
as-slurp-ffmpeg.m4 \
check-libheader.m4 \
codeset.m4 \
- esd.m4 \
freetype2.m4 \
- gconf-2.m4 \
gettext.m4 \
glibc21.m4 \
glib.m4 \
+++ /dev/null
-dnl as-arts.m4 0.1.0
-
-dnl $Id: as-arts.m4,v 1.5 2004/05/21 11:20:49 thomasvs Exp $
-dnl if you copy this file to your cvs,
-dnl add this file using cvs -ko add to retain this header
-
-dnl This is an example arts .m4 adapted and scrubbed by thomasvs
-
-# Configure paths for ARTS
-# Philip Stadermann 2001-06-21
-# stolen from esd.m4
-
-dnl AM_PATH_ARTS([MINIMUM-VERSION, [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]])
-dnl Test for ARTS, and define ARTS_CXXFLAGS and ARTS_LIBS
-dnl
-AC_DEFUN([AM_PATH_ARTS],
-[dnl
-dnl Get the cflags and libraries from the artsc-config script
-dnl
-AC_ARG_WITH(arts-prefix,
- AC_HELP_STRING([--with-arts-prefix=PFX],
- [prefix where ARTS is installed (optional)]),
- arts_prefix="$withval", arts_prefix="")
-
-AC_ARG_WITH(arts-exec-prefix,
- AC_HELP_STRING([--with-arts-exec-prefix=PFX],
- [exec prefix where ARTS is installed (optional)]),
- arts_exec_prefix="$withval", arts_exec_prefix="")
-
-AC_ARG_ENABLE(artstest,
- AC_HELP_STRING([--disable-artstest],
- [do not try to compile and run a test ARTS program]),
- , enable_artstest=yes)
-
- if test x$arts_exec_prefix != x ; then
- arts_args="$arts_args --exec-prefix=$arts_exec_prefix"
- if test x${ARTS_CONFIG+set} != xset ; then
- ARTS_CONFIG=$arts_exec_prefix/bin/artsc-config
- fi
- fi
- if test x$arts_prefix != x ; then
- arts_args="$arts_args --prefix=$arts_prefix"
- if test x${ARTS_CONFIG+set} != xset ; then
- ARTS_CONFIG=$arts_prefix/bin/artsc-config
- fi
- fi
-
- AC_PATH_PROG(ARTS_CONFIG, artsc-config, no)
- min_arts_version=ifelse([$1], ,0.9.5,$1)
- AC_MSG_CHECKING(for ARTS artsc - version >= $min_arts_version)
- no_arts=""
- if test "$ARTS_CONFIG" = "no" ; then
- no_arts=yes
- else
- # FIXME: thomas added this sed to get arts path instead of artsc
- # replace -I.../artsc with -I.../arts
- ARTS_CXXFLAGS=`$ARTS_CONFIG $artsconf_args --cflags | artsc-config --cflags | sed 's/\(-I.*\)artsc/\1arts/'`
- ARTS_LIBS=`$ARTS_CONFIG $artsconf_args --libs | sed 's/artsc$/arts/'`
-
- arts_major_version=`$ARTS_CONFIG $arts_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\1/'`
- arts_minor_version=`$ARTS_CONFIG $arts_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\2/'`
- arts_micro_version=`$ARTS_CONFIG $arts_config_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\3/'`
- if test "x$enable_artstest" = "xyes" ; then
- dnl ac_save_CXXFLAGS="$CXXFLAGS"
- dnl ac_save_LIBS="$LIBS"
- dnl CFLAGS="$CFLAGS $ARTS_CXXFLAGS"
- dnl LIBS="$LIBS $ARTS_LIBS"
-dnl
-dnl Now check if the installed ARTS is sufficiently new. (Also sanity
-dnl checks the results of artsc-config to some extent)
-dnl
-
-dnl a*s: to successfully compile the C++ test app, we need to
-dnl first make sure we're going to compile it as C++ (with AC_LANG_PUSH),
-dnl then add the CFLAGS and CLIBS of arts which we just discovered to the
-dnl C++ compilation and linking flags.
-dnl We also need to clean up after the test; this means using AC_LANG_POP
-dnl and restoring the CPPFLAGS and LDFLAGS from the saved values we take
-dnl here.
-
-dnl ask nicely for C++ compilation
- AC_LANG_PUSH(C++)
-
-dnl save compilation and link flags and make our own
- ac_save_CPPFLAGS="$CPPFLAGS"
- ac_save_LDFLAGS="$LDFLAGS"
- AC_SUBST(CPPFLAGS,"$CPPFLAGS $ARTS_CXXFLAGS")
- AC_SUBST(LDFLAGS,"$LDFLAGS $ARTS_CLIBS")
-
- rm -f conf.artstest
- AC_TRY_RUN([
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <artsflow.h>
-
-char*
-my_strdup (char *str)
-{
- char *new_str;
-
- if (str)
- {
- // thomas: the original test did not have the typecast
- new_str = (char *) malloc ((strlen (str) + 1) * sizeof(char));
- strcpy (new_str, str);
- }
- else
- new_str = NULL;
-
- return new_str;
-}
-
-int main ()
-{
- int major, minor, micro;
- char *tmp_version;
-
- system ("touch conf.artstest");
-
- /* HP/UX 9 (%@#!) writes to sscanf strings */
- tmp_version = my_strdup("$min_arts_version");
- if (sscanf(tmp_version, "%d.%d.%d", &major, &minor, µ) != 3) {
- printf("%s, bad version string\n", "$min_arts_version");
- exit(1);
- }
-
- if (($arts_major_version > major) ||
- (($arts_major_version == major) && ($arts_minor_version > minor)) ||
- (($arts_major_version == major) && ($arts_minor_version == minor) && ($arts_micro_version >= micro)))
- {
- return 0;
- }
- else
- {
- printf("\n*** 'artsc-config --version' returned %d.%d.%d, but the minimum version\n", $arts_major_version, $arts_minor_version, $arts_micro_version);
- printf("*** of ARTS required is %d.%d.%d. If artsc-config is correct, then it is\n", major, minor, micro);
- printf("*** best to upgrade to the required version.\n");
- printf("*** If artsc-config was wrong, set the environment variable ARTS_CONFIG\n");
- printf("*** to point to the correct copy of artsc-config, and remove the file\n");
- printf("*** config.cache before re-running configure\n");
- return 1;
- }
-}
-
-],, no_arts=yes,[echo $ac_n "cross compiling; assumed OK... $ac_c"])
- dnl CFLAGS="$ac_save_CFLAGS"
- dnl LIBS="$ac_save_LIBS"
- dnl a*s this is were we clean up after the test
- AC_LANG_POP(C++)
- CXXFLAGS="$ac_save_CXXFLAGS"
- LDFLAGS="$ac_save_LDFLAGS"
- dnl a*s we are sure that these are right, so we make them active
- AC_SUBST(CXXFLAGS,"$CXXFLAGS")
- AC_SUBST(LDFLAGS,"$LDFLAGS")
- fi
- fi
- if test "x$no_arts" = x ; then
- AC_MSG_RESULT(yes)
- ifelse([$2], , :, [$2])
- else
- AC_MSG_RESULT(no)
- if test "$ARTS_CONFIG" = "no" ; then
- echo "*** The artsc-config script installed by ARTS could not be found"
- echo "*** If ARTS was installed in PREFIX, make sure PREFIX/bin is in"
- echo "*** your path, or set the ARTS_CONFIG environment variable to the"
- echo "*** full path to artsc-config."
- else
- if test -f conf.artstest ; then
- :
- else
- echo "*** Could not run ARTS test program, checking why..."
- CFLAGS="$CFLAGS $ARTS_CXXFLAGS"
- LIBS="$LIBS $ARTS_LIBS"
- AC_TRY_LINK([
-#include <stdio.h>
-#include <artsflow.h>
-], [ return 0; ],
- [ echo "*** The test program compiled, but did not run. This usually means"
- echo "*** that the run-time linker is not finding ARTS or finding the wrong"
- echo "*** version of ARTS. If it is not finding ARTS, you'll need to set your"
- echo "*** LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf to point"
- echo "*** to the installed location Also, make sure you have run ldconfig if that"
- echo "*** is required on your system"
- echo "***"
- echo "*** If you have an old version installed, it is best to remove it, although"
- echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
- [ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means ARTS was incorrectly installed"
- echo "*** or that you have moved ARTS since it was installed. In the latter case, you"
- echo "*** may want to edit the artsc-config script: $ARTS_CONFIG" ])
- CFLAGS="$ac_save_CFLAGS"
- LIBS="$ac_save_LIBS"
- fi
- fi
- ARTS_CXXFLAGS=""
- ARTS_LIBS=""
- ifelse([$3], , :, [$3])
- fi
- AC_SUBST(ARTS_CXXFLAGS)
- AC_SUBST(ARTS_LIBS)
- rm -f conf.artstest
-])
-
-dnl release C++ question
-
+++ /dev/null
-# Configure paths for ESD
-# Manish Singh 98-9-30
-# stolen back from Frank Belew
-# stolen from Manish Singh
-# Shamelessly stolen from Owen Taylor
-
-dnl AM_PATH_ESD([MINIMUM-VERSION, [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]])
-dnl Test for ESD, and define ESD_CFLAGS and ESD_LIBS
-dnl
-AC_DEFUN([AM_PATH_ESD],
-[dnl
-dnl Get the cflags and libraries from the esd-config script
-dnl
-AC_ARG_WITH(esd-prefix,
- AC_HELP_STRING([--with-esd-prefix=PFX],
- [prefix where ESD is installed (optional)]),
- esd_prefix="$withval", esd_prefix="")
-
-AC_ARG_WITH(esd-exec-prefix,
- AC_HELP_STRING([--with-esd-exec-prefix=PFX],
- [exec prefix where ESD is installed (optional)]),
- esd_exec_prefix="$withval", esd_exec_prefix="")
-
-AC_ARG_ENABLE(esdtest,
- AC_HELP_STRING([--disable-esdtest],
- [do not try to compile and run a test ESD program]),
- , enable_esdtest=yes)
-
- if test x$esd_exec_prefix != x ; then
- esd_args="$esd_args --exec-prefix=$esd_exec_prefix"
- if test x${ESD_CONFIG+set} != xset ; then
- ESD_CONFIG=$esd_exec_prefix/bin/esd-config
- fi
- fi
- if test x$esd_prefix != x ; then
- esd_args="$esd_args --prefix=$esd_prefix"
- if test x${ESD_CONFIG+set} != xset ; then
- ESD_CONFIG=$esd_prefix/bin/esd-config
- fi
- fi
-
- AC_PATH_PROG(ESD_CONFIG, esd-config, no)
- min_esd_version=ifelse([$1], ,0.2.7,$1)
- AC_MSG_CHECKING(for ESD - version >= $min_esd_version)
- no_esd=""
- if test "$ESD_CONFIG" = "no" ; then
- no_esd=yes
- else
- AC_LANG_SAVE
- AC_LANG_C
- ESD_CFLAGS=`$ESD_CONFIG $esdconf_args --cflags`
- ESD_LIBS=`$ESD_CONFIG $esdconf_args --libs`
-
- esd_major_version=`$ESD_CONFIG $esd_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\1/'`
- esd_minor_version=`$ESD_CONFIG $esd_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\2/'`
- esd_micro_version=`$ESD_CONFIG $esd_config_args --version | \
- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\3/'`
- if test "x$enable_esdtest" = "xyes" ; then
- ac_save_CFLAGS="$CFLAGS"
- ac_save_LIBS="$LIBS"
- CFLAGS="$CFLAGS $ESD_CFLAGS"
- LIBS="$LIBS $ESD_LIBS"
-dnl
-dnl Now check if the installed ESD is sufficiently new. (Also sanity
-dnl checks the results of esd-config to some extent
-dnl
- rm -f conf.esdtest
- AC_TRY_RUN([
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <esd.h>
-
-char*
-my_strdup (char *str)
-{
- char *new_str;
-
- if (str)
- {
- new_str = malloc ((strlen (str) + 1) * sizeof(char));
- strcpy (new_str, str);
- }
- else
- new_str = NULL;
-
- return new_str;
-}
-
-int main ()
-{
- int major, minor, micro;
- char *tmp_version;
-
- system ("touch conf.esdtest");
-
- /* HP/UX 9 (%@#!) writes to sscanf strings */
- tmp_version = my_strdup("$min_esd_version");
- if (sscanf(tmp_version, "%d.%d.%d", &major, &minor, µ) != 3) {
- printf("%s, bad version string\n", "$min_esd_version");
- exit(1);
- }
-
- if (($esd_major_version > major) ||
- (($esd_major_version == major) && ($esd_minor_version > minor)) ||
- (($esd_major_version == major) && ($esd_minor_version == minor) && ($esd_micro_version >= micro)))
- {
- return 0;
- }
- else
- {
- printf("\n*** 'esd-config --version' returned %d.%d.%d, but the minimum version\n", $esd_major_version, $esd_minor_version, $esd_micro_version);
- printf("*** of ESD required is %d.%d.%d. If esd-config is correct, then it is\n", major, minor, micro);
- printf("*** best to upgrade to the required version.\n");
- printf("*** If esd-config was wrong, set the environment variable ESD_CONFIG\n");
- printf("*** to point to the correct copy of esd-config, and remove the file\n");
- printf("*** config.cache before re-running configure\n");
- return 1;
- }
-}
-
-],, no_esd=yes,[echo $ac_n "cross compiling; assumed OK... $ac_c"])
- CFLAGS="$ac_save_CFLAGS"
- LIBS="$ac_save_LIBS"
- AC_LANG_RESTORE
- fi
- fi
- if test "x$no_esd" = x ; then
- AC_MSG_RESULT(yes)
- ifelse([$2], , :, [$2])
- else
- AC_MSG_RESULT(no)
- if test "$ESD_CONFIG" = "no" ; then
- echo "*** The esd-config script installed by ESD could not be found"
- echo "*** If ESD was installed in PREFIX, make sure PREFIX/bin is in"
- echo "*** your path, or set the ESD_CONFIG environment variable to the"
- echo "*** full path to esd-config."
- else
- if test -f conf.esdtest ; then
- :
- else
- echo "*** Could not run ESD test program, checking why..."
- CFLAGS="$CFLAGS $ESD_CFLAGS"
- LIBS="$LIBS $ESD_LIBS"
- AC_LANG_SAVE
- AC_LANG_C
- AC_TRY_LINK([
-#include <stdio.h>
-#include <esd.h>
-], [ return 0; ],
- [ echo "*** The test program compiled, but did not run. This usually means"
- echo "*** that the run-time linker is not finding ESD or finding the wrong"
- echo "*** version of ESD. If it is not finding ESD, you'll need to set your"
- echo "*** LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf to point"
- echo "*** to the installed location Also, make sure you have run ldconfig if that"
- echo "*** is required on your system"
- echo "***"
- echo "*** If you have an old version installed, it is best to remove it, although"
- echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"],
- [ echo "*** The test program failed to compile or link. See the file config.log for the"
- echo "*** exact error that occured. This usually means ESD was incorrectly installed"
- echo "*** or that you have moved ESD since it was installed. In the latter case, you"
- echo "*** may want to edit the esd-config script: $ESD_CONFIG" ])
- CFLAGS="$ac_save_CFLAGS"
- LIBS="$ac_save_LIBS"
- AC_LANG_RESTORE
- fi
- fi
- ESD_CFLAGS=""
- ESD_LIBS=""
- ifelse([$3], , :, [$3])
- fi
- AC_SUBST(ESD_CFLAGS)
- AC_SUBST(ESD_LIBS)
- rm -f conf.esdtest
-])
-
-dnl AM_ESD_SUPPORTS_MULTIPLE_RECORD([ACTION-IF-SUPPORTS [, ACTION-IF-NOT-SUPPORTS]])
-dnl Test, whether esd supports multiple recording clients (version >=0.2.21)
-dnl
-AC_DEFUN([AM_ESD_SUPPORTS_MULTIPLE_RECORD],
-[dnl
- AC_MSG_NOTICE([whether installed esd version supports multiple recording clients])
- ac_save_ESD_CFLAGS="$ESD_CFLAGS"
- ac_save_ESD_LIBS="$ESD_LIBS"
- AM_PATH_ESD(0.2.21,
- ifelse([$1], , [
- AM_CONDITIONAL(ESD_SUPPORTS_MULTIPLE_RECORD, true)
- AC_DEFINE(ESD_SUPPORTS_MULTIPLE_RECORD, 1,
- [Define if you have esound with support of multiple recording clients.])],
- [$1]),
- ifelse([$2], , [AM_CONDITIONAL(ESD_SUPPORTS_MULTIPLE_RECORD, false)], [$2])
- if test "x$ac_save_ESD_CFLAGS" != x ; then
- ESD_CFLAGS="$ac_save_ESD_CFLAGS"
- fi
- if test "x$ac_save_ESD_LIBS" != x ; then
- ESD_LIBS="$ac_save_ESD_LIBS"
- fi
- )
-])
+++ /dev/null
-dnl AM_GCONF_SOURCE_2
-dnl Defines GCONF_SCHEMA_CONFIG_SOURCE which is where you should install schemas
-dnl (i.e. pass to gconftool-2
-dnl Defines GCONF_SCHEMA_FILE_DIR which is a filesystem directory where
-dnl you should install foo.schemas files
-dnl
-
-AC_DEFUN([AM_GCONF_SOURCE_2],
-[
- if test "x$GCONF_SCHEMA_INSTALL_SOURCE" = "x"; then
- GCONF_SCHEMA_CONFIG_SOURCE=`gconftool-2 --get-default-source`
- else
- GCONF_SCHEMA_CONFIG_SOURCE=$GCONF_SCHEMA_INSTALL_SOURCE
- fi
-
- AC_ARG_WITH(gconf-source,
- [ --with-gconf-source=sourceaddress Config database for installing schema files.],GCONF_SCHEMA_CONFIG_SOURCE="$withval",)
-
- AC_SUBST(GCONF_SCHEMA_CONFIG_SOURCE)
- AC_MSG_RESULT([Using config source $GCONF_SCHEMA_CONFIG_SOURCE for schema installation])
-
- if test "x$GCONF_SCHEMA_FILE_DIR" = "x"; then
- GCONF_SCHEMA_FILE_DIR='$(sysconfdir)/gconf/schemas'
- fi
-
- AC_ARG_WITH(gconf-schema-file-dir,
- [ --with-gconf-schema-file-dir=dir Directory for installing schema files.],GCONF_SCHEMA_FILE_DIR="$withval",)
-
- AC_SUBST(GCONF_SCHEMA_FILE_DIR)
- AC_MSG_RESULT([Using $GCONF_SCHEMA_FILE_DIR as install directory for schema files])
-
- AC_ARG_ENABLE(schemas-install,
- [ --disable-schemas-install Disable the schemas installation],
- [case "${enableval}" in
- yes) schemas_install=true ;;
- no) schemas_install=false ;;
- *) AC_MSG_ERROR(bad value ${enableval} for --disable-schemas-install) ;;
- esac],[schemas_install=true])
- AM_CONDITIONAL(GCONF_SCHEMAS_INSTALL, test x$schemas_install = xtrue)
-])
-ext/esd/esdsink.c
ext/pulse/pulsesink.c
ext/flac/gstflacdec.c
ext/jpeg/gstjpegdec.c
ext/shout2/gstshout2.c
ext/soup/gstsouphttpsrc.c
ext/wavpack/gstwavpackparse.c
-gconf/gstreamer.schemas.in
gst/avi/gstavidemux.c
gst/avi/gstavimux.c
gst/isomp4/qtdemux.c
msgstr ""
"Project-Id-Version: gst-plugins 0.7.6\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2004-03-18 14:16+0200\n"
"Last-Translator: Petri Jooste <rkwjpj@puk.ac.za>\n"
"Language-Team: Afrikaans <i18n@af.org.za>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
"Project-Id-Version: gst-plugins-0.8.0\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2004-03-19 18:29+0200\n"
"Last-Translator: Metin Amiroff <metin@karegen.com>\n"
"Language-Team: Azerbaijani <translation-team-az@lists.sourceforge.net>\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Generator: KBabel 1.0.2\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-04-26 22:35+0300\n"
"Last-Translator: Alexander Shopov <ash@kambanaria.org>\n"
"Language-Team: Bulgarian <dict@fsa-bg.org>\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"
-msgid "Could not establish connection to sound server"
-msgstr "Не може да се осъществи връзка към звуковия сървър."
-
-msgid "Failed to query sound server capabilities"
-msgstr "Неуспешно запитване за възможностите на звуковия сървър."
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Работата без часовник е невъзможна."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Не може да се осъществи връзка към звуковия сървър."
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Неуспешно запитване за възможностите на звуковия сървър."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.9.7\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2005-12-04 21:54+0100\n"
"Last-Translator: Jordi Mallach <jordi@sindominio.net>\n"
"Language-Team: Catalan <ca@dodds.net>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-01-12 23:07+0100\n"
"Last-Translator: Petr Kovar <pknbe@volny.cz>\n"
"Language-Team: Czech <translation-team-cs@lists.sourceforge.net>\n"
"Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n"
"X-Generator: Lokalize 1.1\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nezdařilo se navázání spojení se zvukovým serverem"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Nezdařil se dotaz na schopnosti zvukového serveru"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Není možné fungovat bez hodin"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nezdařilo se navázání spojení se zvukovým serverem"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Nezdařil se dotaz na schopnosti zvukového serveru"
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-01-07 23:54+0200\n"
"Last-Translator: Joe Hansen <joedalton2@yahoo.dk>\n"
"Language-Team: Danish <dansk@dansk-gruppen.dk>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Kunne ikke skabe kontakt til lyd-serveren"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Listning af lydservers egenskaber fejlede"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Kan ikke fungere uden et ur"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Kunne ikke skabe kontakt til lyd-serveren"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Listning af lydservers egenskaber fejlede"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-01-21 22:36+0100\n"
"Last-Translator: Christian Kirbach <christian.kirbach@googlemail.com>\n"
"Language-Team: German <translation-team-de@lists.sourceforge.net>\n"
"X-Poedit-Country: GERMANY\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Es konnte keine Verbindung zum Audio-Server hergestellt werden"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Abfrage der Fähigkeiten des Audio-Servers ist fehlgeschlagen"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Es kann nicht ohne einen Taktgeber gearbeitet werden."
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Es konnte keine Verbindung zum Audio-Server hergestellt werden"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Abfrage der Fähigkeiten des Audio-Servers ist fehlgeschlagen"
+
#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
#~ msgstr ""
#~ "Aufzählen möglicher Videoformate, mit denen Gerät »%s« arbeiten kann, "
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-10-27 12:16+0200\n"
"Last-Translator: Michael Kotsarinis <mk73628@gmail.com>\n"
"Language-Team: Greek <team@lists.gnome.gr>\n"
"X-Generator: Lokalize 0.3\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Δεν ήταν δυνατή η σύνδεση με τον διακομιστή ήχου"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Αποτυχία αναζήτησης των δυνατοτήτων του διακομιστή ήχου"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Δεν είναι δυνατή η λειτουργία χωρίς ρολόι "
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Δεν ήταν δυνατή η σύνδεση με τον διακομιστή ήχου"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Αποτυχία αναζήτησης των δυνατοτήτων του διακομιστή ήχου"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Περιγράφει το επιλεγμένο στοιχείο εισαγωγής."
msgstr ""
"Project-Id-Version: gst-plugins 0.8.1\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2004-04-26 10:41-0400\n"
"Last-Translator: Gareth Owen <gowen72@yahoo.com>\n"
"Language-Team: English (British) <en_gb@li.org>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-02-12 18:30+0100\n"
"Last-Translator: Jorge González González <aloriel@gmail.com>\n"
"Language-Team: Spanish <es@li.org>\n"
"X-Generator: KBabel 1.11.4\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "No se pudo establecer la conexión con el servidor de sonido"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Falló al preguntar al servidor de sonido sus capacidades"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "No se puede operar sin reloj"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "No se pudo establecer la conexión con el servidor de sonido"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Falló al preguntar al servidor de sonido sus capacidades"
+
#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
#~ msgstr ""
#~ "Falló al enumerar los posibles formatos de vídeo con los que el "
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.18.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-03-25 12:37+0100\n"
"Last-Translator: Mikel Olasagasti Uranga <hey_neken@mundurat.net>\n"
"Language-Team: Basque <translation-team-eu@lists.sourceforge.net>\n"
"X-Generator: KBabel 1.11.4\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Ezin izan da konexioa ezarri soinu-zerbitzariarekin"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Huts egin du soinu-zerbitzariaren ahalmena kontsultatzean"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Ezin du funtzionatu erlojurik gabe"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Ezin izan da konexioa ezarri soinu-zerbitzariarekin"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Huts egin du soinu-zerbitzariaren ahalmena kontsultatzean"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Sarrerako elementu hautatua deskribatzen du."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-11-17 23:03+0200\n"
"Last-Translator: Tommi Vainikainen <Tommi.Vainikainen@iki.fi>\n"
"Language-Team: Finnish <translation-team-fi@lists.sourceforge.net>\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Yhteyttä äänipalvelimeen ei voitu avata"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Äänipalvelimen ominaisuuksia ei voitu selvittää"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Ei voitu toimia ilman kelloa"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Yhteyttä äänipalvelimeen ei voitu avata"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Äänipalvelimen ominaisuuksia ei voitu selvittää"
+
#~ msgid ""
#~ "The buffer type is not supported, or the index is out of bounds, or no "
#~ "buffers have been allocated yet, or the userptr or length are invalid. "
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-04-28 09:22+0200\n"
"Last-Translator: Claude Paroz <claude@2xlibre.net>\n"
"Language-Team: French <traduc@traduc.org>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Impossible d'établir une connexion vers le serveur de son"
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-"Échec lors de l'interrogation du serveur de son au sujet de ses capacités"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Impossible de fonctionner sans horloge"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Impossible d'établir une connexion vers le serveur de son"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr ""
+#~ "Échec lors de l'interrogation du serveur de son au sujet de ses capacités"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-01-09 21:20+0100\n"
"Last-Translator: Fran Diéguez <frandieguez@ubuntu.com>\n"
"Language-Team: Galician <proxecto@trasno.net>\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n!=1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Non foi posíbel estabelecer a conexión co servidor de son"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Produciuse un erro ao consultar as capacidades do servidor de son"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Non é posíbel operar sen reloxo"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Non foi posíbel estabelecer a conexión co servidor de son"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Produciuse un erro ao consultar as capacidades do servidor de son"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Describe o elemento de entrada seleccionado."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-11-04 01:21+0100\n"
"Last-Translator: Gabor Kelemen <kelemeng@gnome.hu>\n"
"Language-Team: Hungarian <translation-team-hu@lists.sourceforge.net>\n"
"X-Rosetta-Export-Date: 2007-07-27 19:18:15+0000\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nem hozható létre kapcsolat a hangkiszolgálóhoz"
-
-msgid "Failed to query sound server capabilities"
-msgstr "A hangkiszolgáló képességeinek lekérdezése meghiúsult"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Óra nélkül lehetetlen a működés"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nem hozható létre kapcsolat a hangkiszolgálóhoz"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "A hangkiszolgáló képességeinek lekérdezése meghiúsult"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-02-26 18:09+0700\n"
"Last-Translator: Andhika Padmawan <andhika.padmawan@gmail.com>\n"
"Language-Team: Indonesian <translation-team-id@lists.sourceforge.net>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Tak dapat membangun koneksi ke server suara"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Gagal untuk kueri kemampuan server suara"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Tak dapat beroperasi tanpa jam"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Tak dapat membangun koneksi ke server suara"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Gagal untuk kueri kemampuan server suara"
+
#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
#~ msgstr ""
#~ "Gagal menyebutkan kemungkinan format video yang dapat bekerja dengan "
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-10-25 10:11+0200\n"
"Last-Translator: Luca Ferretti <elle.uca@infinito.it>\n"
"Language-Team: Italian <tp@lists.linux.it>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Impossibile stabilire la connessione al server audio"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Interrogazione delle funzionalità del server audio non riuscita"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Impossibile operare senza un clock"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Impossibile stabilire la connessione al server audio"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Interrogazione delle funzionalità del server audio non riuscita"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-04-26 20:15+0900\n"
"Last-Translator: Makoto Kato <makoto.kt@gmail.com>\n"
"Language-Team: Japanese <translation-team-ja@lists.sourceforge.net>\n"
"X-Generator: Lokalize 0.3\n"
"Plural-Forms: nplurals=1; plural=0;\n"
-msgid "Could not establish connection to sound server"
-msgstr "サウンドサーバーへの接続に失敗しました"
-
-msgid "Failed to query sound server capabilities"
-msgstr "サウンドサーバーのケイパビリティのクエリーに失敗しました"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr ""
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "サウンドサーバーへの接続に失敗しました"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "サウンドサーバーのケイパビリティのクエリーに失敗しました"
+
#~ msgid ""
#~ "Failed trying to get video frames from device '%s'. Not enough memory."
#~ msgstr ""
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.23.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-07-16 19:34+0300\n"
"Last-Translator: Žygimantas Beručka <uid0@akl.lt>\n"
"Language-Team: Lithuanian <komp_lt@konferencijos.lt>\n"
"%100<10 || n%100>=20) ? 1 : 2);\n"
"X-Generator: Virtaal 0.6.1\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nepavyko prisijungti prie garso serverio"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Nepavyko užklausti garso serverio galimybių"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Negali veikti be laikrodžio"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nepavyko prisijungti prie garso serverio"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Nepavyko užklausti garso serverio galimybių"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Aprašo pasirinktą įvesties elementą."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.23.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-07-07 11:53+0100\n"
"Last-Translator: Rihards Priedītis <rprieditis@gmail.com>\n"
"Language-Team: Latvian <translation-team-lv@lists.sourceforge.net>\n"
"2);\n"
"X-Generator: Lokalize 1.0\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nevar izveidot savienojumu ar skaņas serveri"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Neizdevās noskaidrot skaņas servera iespējas"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Nevar veikt darbības bez pulksteņa"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nevar izveidot savienojumu ar skaņas serveri"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Neizdevās noskaidrot skaņas servera iespējas"
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.10.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2008-10-26 19:09+0100\n"
"Last-Translator: Michel Bugeja <michelbugeja@rabatmalta.com>\n"
"Language-Team: Maltese <translation-team-mt@lists.sourceforge.net>\n"
"X-Poedit-Language: Maltese\n"
"X-Poedit-Country: MALTA\n"
-msgid "Could not establish connection to sound server"
-msgstr "Kuntatt mas-sound server ma ġiex stabbilit"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Mhux possibli t-tfittxija għall-kapaċita tas-sound server"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Ma nistax nħaddem mingħajr arloġġ"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Kuntatt mas-sound server ma ġiex stabbilit"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Mhux possibli t-tfittxija għall-kapaċita tas-sound server"
+
#~ msgid "Error stopping streaming capture from device '%s'."
#~ msgstr "Problem biex inwaqqaf streaming capture mill-apparat '%s'."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-10-24 21:53+0200\n"
"Last-Translator: Kjartan Maraas <kmaraas@gnome.org>\n"
"Language-Team: Norwegian Bokmaal <i18n-nb@lister.ping.uio.no>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Kunne ikke etablere tilkobling til lydtjener"
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Kan ikke operere uten en klokke"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Kunne ikke etablere tilkobling til lydtjener"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Beskriver valgt inndataelement."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-04-27 00:05+0200\n"
"Last-Translator: Freek de Kruijf <f.de.kruijf@gmail.com>\n"
"Language-Team: Dutch <vertaling@vrijschrift.org>\n"
"X-Generator: Lokalize 1.1\n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"
-msgid "Could not establish connection to sound server"
-msgstr "Kan geen verbinding maken met de geluidsserver"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Kan de eigenschappen van de geluidsserver niet opvragen"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Kan niet werken zonder een klok."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Kan geen verbinding maken met de geluidsserver"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Kan de eigenschappen van de geluidsserver niet opvragen"
msgstr ""
"Project-Id-Version: gst-plugins-0.8.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2004-09-27 13:32+0530\n"
"Last-Translator: Gora Mohanty <gora_mohanty@yahoo.co.in>\n"
"Language-Team: Oriya <gora_mohanty@yahoo.co.in>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-01-07 21:53+0100\n"
"Last-Translator: Jakub Bogusz <qboosh@pld-linux.org>\n"
"Language-Team: Polish <translation-team-pl@lists.sourceforge.net>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nie udało się nawiązać połączenia z serwerem dźwięku"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Nie udało się odpytać o możliwości serwera dźwięku"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Nie można pracować bez zegara"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nie udało się nawiązać połączenia z serwerem dźwięku"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Nie udało się odpytać o możliwości serwera dźwięku"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-01-08 01:28-0300\n"
"Last-Translator: Fabrício Godoy <skarllot@gmail.com>\n"
"Language-Team: Brazilian Portuguese <ldp-br@bazar.conectiva.com.br>\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n > 1);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Não foi possível estabelecer uma conexão com servidor de som"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Falha ao examinar os recursos do servidor de som"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Não é possível operar sem um temporizador"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Não foi possível estabelecer uma conexão com servidor de som"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Falha ao examinar os recursos do servidor de som"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.23.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-08-16 03:22+0300\n"
"Last-Translator: Lucian Adrian Grijincu <lucian.grijincu@gmail.com>\n"
"Language-Team: Romanian <translation-team-ro@lists.sourceforge.net>\n"
"X-Generator: Virtaal 0.6.1\n"
"X-Launchpad-Export-Date: 2010-08-16 00:08+0000\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nu se poate stabili o conexiune la serverul de sunet"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Interogarea capabilităților serverului de sunet a eșuat"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Nu se poate opera fără un ceas"
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nu se poate stabili o conexiune la serverul de sunet"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Interogarea capabilităților serverului de sunet a eșuat"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-04-26 21:13+0400\n"
"Last-Translator: Yuri Kozlov <yuray@komyakino.ru>\n"
"Language-Team: Russian <gnu@mx.ru>\n"
"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
-msgid "Could not establish connection to sound server"
-msgstr "Не удалось установить соединение с сервером звука"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Ошибка при запросе возможностей сервера звука"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Операция невозможна без часов"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Не удалось установить соединение с сервером звука"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Ошибка при запросе возможностей сервера звука"
+
#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
#~ msgstr ""
#~ "Не удалось создать список видео-форматов, с которыми может работать "
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.25.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-11-08 15:48+0100\n"
"Last-Translator: Peter Tuhársky <tuharsky@misbb.sk>\n"
"Language-Team: Slovak <sk-i18n@lists.linux.sk>\n"
"X-Generator: KBabel 1.11.4\n"
"X-Poedit-Country: SLOVAKIA\n"
-msgid "Could not establish connection to sound server"
-msgstr "Nepodarilo sa nadviazať spojenie so zvukovým serverom"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Nepodarilo sa zistiť schopnosti zvukového servera"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Nemôžem fungovať bez hodín"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Nepodarilo sa nadviazať spojenie so zvukovým serverom"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Nepodarilo sa zistiť schopnosti zvukového servera"
+
#~ msgid "Failed getting controls attributes on device '%s.'"
#~ msgstr "Nepodarilo sa získať atribúty ovládacích prvkov na zariadení '%s.'"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-03-12 18:35+0100\n"
"Last-Translator: Klemen Košir <klemen.kosir@gmx.com>\n"
"Language-Team: Slovenian <translation-team-sl@lists.sourceforge.net>\n"
"X-Poedit-Country: SLOVENIA\n"
"X-Poedit-SourceCharset: utf-8\n"
-msgid "Could not establish connection to sound server"
-msgstr "Povezave z zvočnim strežnikom ni mogoče vzpostaviti"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Poizvedba o zmogljivosti zvočnega strežnika je spodletela"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Delovanje brez ure ni mogoče"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Povezave z zvočnim strežnikom ni mogoče vzpostaviti"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Poizvedba o zmogljivosti zvočnega strežnika je spodletela"
+
#~ msgid "Describes the selected input element."
#~ msgstr "Opisuje izbran vnosni predmet."
msgstr ""
"Project-Id-Version: gst-plugins 0.8.3\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2004-08-07 20:29+0200\n"
"Last-Translator: Laurent Dhima <laurenti@alblinux.net>\n"
"Language-Team: Albanian <begraj@hotmail.com>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
"Project-Id-Version: gst-plugins 0.7.6\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2004-03-13 00:18+0100\n"
"Last-Translator: Danilo Segan <dsegan@gmx.net>\n"
"Language-Team: Serbian <gnu@prevod.org>\n"
"Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : (n%10>=2 && n"
"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-01-09 19:36+0100\n"
"Last-Translator: Daniel Nylander <po@danielnylander.se>\n"
"Language-Team: Swedish <tp-sv@listor.tp-sv.se>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Kunde inte etablera en anslutning till ljudservern"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Misslyckades med att fråga efter ljudserverförmågor"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Kan inte fungera utan en klocka"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Kunde inte etablera en anslutning till ljudservern"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Misslyckades med att fråga efter ljudserverförmågor"
+
#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
#~ msgstr ""
#~ "Misslyckades med att räkna upp möjliga videoformat som enheten \"%s\" kan "
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.26.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-01-08 00:03+0200\n"
"Last-Translator: Server Acim <serveracim@gmail.com>\n"
"Language-Team: Turkish <gnu-tr-u12a@lists.sourceforge.net>\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "Ses sunucusuyla bağlantı kurulumayor"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Ses sunucusu olanakları sorgulanamadı"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Saat olmadan çalışamaz"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Ses sunucusuyla bağlantı kurulumayor"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Ses sunucusu olanakları sorgulanamadı"
+
#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
#~ msgstr ""
#~ "Aygıtın '%s' birlikte çalışabileceği vidyo kiplerini sıralamada hata"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.28.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2011-05-01 11:55+0300\n"
"Last-Translator: Yuri Chornoivan <yurchor@ukr.net>\n"
"Language-Team: Ukrainian <translation-team-uk@lists.sourceforge.net>\n"
"X-Generator: Lokalize 1.2\n"
"Plural-Forms: nplurals=1; plural=0;\n"
-msgid "Could not establish connection to sound server"
-msgstr "Не вдалося встановити з'єднання із звуковим сервером."
-
-msgid "Failed to query sound server capabilities"
-msgstr "Помилка при запиті можливостей звукового сервера"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Робота без годинника неможлива"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Не вдалося встановити з'єднання із звуковим сервером."
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Помилка при запиті можливостей звукового сервера"
+
#~ msgid "Could not read from CD."
#~ msgstr "Не вдається прочитати з компакт-диску."
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.23.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2010-10-03 18:48+1030\n"
"Last-Translator: Clytie Siddall <clytie@riverland.net.au>\n"
"Language-Team: Vietnamese <vi-VN@googlegroups.com>\n"
"Plural-Forms: nplurals=1; plural=0;\n"
"X-Generator: LocFactoryEditor 1.8\n"
-msgid "Could not establish connection to sound server"
-msgstr "Không thể thiết lập sự kết nối tới máy phục vụ âm thanh"
-
-msgid "Failed to query sound server capabilities"
-msgstr "Lỗi truy vấn khả năng của máy phục vụ âm thanh"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "Không thể thao tác khi không có đồng hồ."
+
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "Không thể thiết lập sự kết nối tới máy phục vụ âm thanh"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "Lỗi truy vấn khả năng của máy phục vụ âm thanh"
msgstr ""
"Project-Id-Version: gst-plugins-good 0.10.16.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2009-11-13 22:20+0800\n"
"Last-Translator: Ji ZhengYu <zhengyuji@gmail.com>\n"
"Language-Team: Chinese (simplified) <i18n-zh@googlegroups.com>\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr "无法建立与音频服务器的连接"
-
-msgid "Failed to query sound server capabilities"
-msgstr "查寻音频服务器的服务失败"
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgid "Cannot operate without a clock"
msgstr "没有时钟的话无法操作"
+#~ msgid "Could not establish connection to sound server"
+#~ msgstr "无法建立与音频服务器的连接"
+
+#~ msgid "Failed to query sound server capabilities"
+#~ msgstr "查寻音频服务器的服务失败"
+
#~ msgid "Failed to enumerate possible video formats device '%s' can work with"
#~ msgstr "枚举设备‘%s’可能支持的视频格式时出错"
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.2 0.10.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2006-08-29 01:08+0800\n"
"Last-Translator: Abel Cheung <abelcheung@gmail.com>\n"
"Language-Team: Chinese (Hong Kong) <community@linuxhall.org>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
msgstr ""
"Project-Id-Version: gst-plugins-good-0.10.2 0.10.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2011-05-10 09:38+0100\n"
+"POT-Creation-Date: 2011-06-15 15:02+0100\n"
"PO-Revision-Date: 2006-08-29 01:08+0800\n"
"Last-Translator: Abel Cheung <abelcheung@gmail.com>\n"
"Language-Team: Chinese (traditional) <zh-l10n@linux.org.tw>\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-msgid "Could not establish connection to sound server"
-msgstr ""
-
-msgid "Failed to query sound server capabilities"
-msgstr ""
-
#. TRANSLATORS: 'song title' by 'artist name'
#, c-format
msgid "'%s' by '%s'"
}
ret = gst_oss4_mixer_set_control_val (s->mixer, s->mc, volume);
} else {
- ret = gst_oss4_mixer_set_control_val (s->mixer, s->mc->mute, ! !mute);
+ ret = gst_oss4_mixer_set_control_val (s->mixer, s->mc->mute, !!mute);
}
if (mute) {
if (s->mc->mute != NULL && s->mc->mute->changed) {
gst_mixer_mute_toggled (GST_MIXER (s->mixer), track,
- ! !s->mc->mute->last_val);
+ !!s->mc->mute->last_val);
} else {
/* nothing to do here, since we don't/can't easily implement the record
* flag */
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
-
/*
* GstV4l2Buffer:
*/
-
-static GstBufferClass *v4l2buffer_parent_class = NULL;
+const GstMetaInfo *
+gst_meta_v4l2_get_info (void)
+{
+ static const GstMetaInfo *meta_info = NULL;
+
+ if (meta_info == NULL) {
+ meta_info =
+ gst_meta_register ("GstMetaV4l2", "GstMetaV4l2",
+ sizeof (GstMetaV4l2), (GstMetaInitFunction) NULL,
+ (GstMetaFreeFunction) NULL, (GstMetaTransformFunction) NULL,
+ (GstMetaSerializeFunction) NULL, (GstMetaDeserializeFunction) NULL);
+ }
+ return meta_info;
+}
static void
-gst_v4l2_buffer_finalize (GstV4l2Buffer * buffer)
+gst_v4l2_buffer_dispose (GstBuffer * buffer)
{
GstV4l2BufferPool *pool;
gboolean resuscitated = FALSE;
gint index;
+ GstMetaV4l2 *meta;
- pool = buffer->pool;
+ meta = GST_META_V4L2_GET (buffer);
+ g_assert (meta != NULL);
- index = buffer->vbuffer.index;
+ pool = meta->pool;
+ index = meta->vbuffer.index;
GST_LOG_OBJECT (pool->v4l2elem, "finalizing buffer %p %d", buffer, index);
if (resuscitated) {
/* FIXME: check that the caps didn't change */
GST_LOG_OBJECT (pool->v4l2elem, "reviving buffer %p, %d", buffer, index);
- gst_buffer_ref (GST_BUFFER (buffer));
+ gst_buffer_ref (buffer);
GST_BUFFER_SIZE (buffer) = 0;
pool->buffers[index] = buffer;
}
if (!resuscitated) {
GST_LOG_OBJECT (pool->v4l2elem,
"buffer %p (data %p, len %u) not recovered, unmapping",
- buffer, GST_BUFFER_DATA (buffer), buffer->vbuffer.length);
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
- v4l2_munmap ((void *) GST_BUFFER_DATA (buffer), buffer->vbuffer.length);
+ buffer, GST_BUFFER_DATA (buffer), meta->vbuffer.length);
+ v4l2_munmap ((void *) GST_BUFFER_DATA (buffer), meta->vbuffer.length);
- GST_MINI_OBJECT_CLASS (v4l2buffer_parent_class)->finalize (GST_MINI_OBJECT
- (buffer));
+ g_object_unref (pool);
}
}
-static void
-gst_v4l2_buffer_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
- v4l2buffer_parent_class = g_type_class_peek_parent (g_class);
-
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_v4l2_buffer_finalize;
-}
-
-GType
-gst_v4l2_buffer_get_type (void)
-{
- static GType _gst_v4l2_buffer_type;
-
- if (G_UNLIKELY (_gst_v4l2_buffer_type == 0)) {
- static const GTypeInfo v4l2_buffer_info = {
- sizeof (GstBufferClass),
- NULL,
- NULL,
- gst_v4l2_buffer_class_init,
- NULL,
- NULL,
- sizeof (GstV4l2Buffer),
- 0,
- NULL,
- NULL
- };
- _gst_v4l2_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
- "GstV4l2Buffer", &v4l2_buffer_info, 0);
- }
- return _gst_v4l2_buffer_type;
-}
-
-static GstV4l2Buffer *
+static GstBuffer *
gst_v4l2_buffer_new (GstV4l2BufferPool * pool, guint index, GstCaps * caps)
{
- GstV4l2Buffer *ret;
- guint8 *data;
+ GstBuffer *ret;
+ guint8 *mem;
+ GstMetaV4l2 *meta;
+
+ ret = gst_buffer_new ();
+ GST_MINI_OBJECT_CAST (ret)->dispose =
+ (GstMiniObjectDisposeFunction) gst_v4l2_buffer_dispose;
- ret = (GstV4l2Buffer *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER);
+ meta = GST_META_V4L2_ADD (ret);
GST_LOG_OBJECT (pool->v4l2elem, "creating buffer %u, %p in pool %p", index,
ret, pool);
- ret->pool =
- (GstV4l2BufferPool *) gst_mini_object_ref (GST_MINI_OBJECT (pool));
+ meta->pool = (GstV4l2BufferPool *) g_object_ref (pool);
- ret->vbuffer.index = index;
- ret->vbuffer.type = pool->type;
- ret->vbuffer.memory = V4L2_MEMORY_MMAP;
+ meta->vbuffer.index = index;
+ meta->vbuffer.type = pool->type;
+ meta->vbuffer.memory = V4L2_MEMORY_MMAP;
- if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &ret->vbuffer) < 0)
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
goto querybuf_failed;
- GST_LOG_OBJECT (pool->v4l2elem, " index: %u", ret->vbuffer.index);
- GST_LOG_OBJECT (pool->v4l2elem, " type: %d", ret->vbuffer.type);
- GST_LOG_OBJECT (pool->v4l2elem, " bytesused: %u", ret->vbuffer.bytesused);
- GST_LOG_OBJECT (pool->v4l2elem, " flags: %08x", ret->vbuffer.flags);
- GST_LOG_OBJECT (pool->v4l2elem, " field: %d", ret->vbuffer.field);
- GST_LOG_OBJECT (pool->v4l2elem, " memory: %d", ret->vbuffer.memory);
- if (ret->vbuffer.memory == V4L2_MEMORY_MMAP)
+ GST_LOG_OBJECT (pool->v4l2elem, " index: %u", meta->vbuffer.index);
+ GST_LOG_OBJECT (pool->v4l2elem, " type: %d", meta->vbuffer.type);
+ GST_LOG_OBJECT (pool->v4l2elem, " bytesused: %u", meta->vbuffer.bytesused);
+ GST_LOG_OBJECT (pool->v4l2elem, " flags: %08x", meta->vbuffer.flags);
+ GST_LOG_OBJECT (pool->v4l2elem, " field: %d", meta->vbuffer.field);
+ GST_LOG_OBJECT (pool->v4l2elem, " memory: %d", meta->vbuffer.memory);
+ if (meta->vbuffer.memory == V4L2_MEMORY_MMAP)
GST_LOG_OBJECT (pool->v4l2elem, " MMAP offset: %u",
- ret->vbuffer.m.offset);
- GST_LOG_OBJECT (pool->v4l2elem, " length: %u", ret->vbuffer.length);
- GST_LOG_OBJECT (pool->v4l2elem, " input: %u", ret->vbuffer.input);
+ meta->vbuffer.m.offset);
+ GST_LOG_OBJECT (pool->v4l2elem, " length: %u", meta->vbuffer.length);
+ GST_LOG_OBJECT (pool->v4l2elem, " input: %u", meta->vbuffer.input);
- data = (guint8 *) v4l2_mmap (0, ret->vbuffer.length,
+ mem = (guint8 *) v4l2_mmap (0, meta->vbuffer.length,
PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
- ret->vbuffer.m.offset);
+ meta->vbuffer.m.offset);
- if (data == MAP_FAILED)
+ if (mem == MAP_FAILED)
goto mmap_failed;
- GST_BUFFER_DATA (ret) = data;
- GST_BUFFER_SIZE (ret) = ret->vbuffer.length;
+ GST_BUFFER_DATA (ret) = mem;
+ GST_BUFFER_SIZE (ret) = meta->vbuffer.length;
GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_READONLY);
- gst_buffer_set_caps (GST_BUFFER (ret), caps);
+ gst_buffer_set_caps (ret, caps);
return ret;
gint errnosave = errno;
GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave));
- gst_buffer_unref (GST_BUFFER (ret));
+ gst_buffer_unref (ret);
errno = errnosave;
return NULL;
}
gint errnosave = errno;
GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave));
- gst_buffer_unref (GST_BUFFER (ret));
+ gst_buffer_unref (ret);
errno = errnosave;
return NULL;
}
* GstV4l2BufferPool:
*/
-static GstMiniObjectClass *buffer_pool_parent_class = NULL;
+static GObjectClass *buffer_pool_parent_class = NULL;
static void
-gst_v4l2_buffer_pool_finalize (GstV4l2BufferPool * pool)
+gst_v4l2_buffer_pool_finalize (GObject * object)
{
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (object);
+
g_mutex_free (pool->lock);
pool->lock = NULL;
pool->buffers = NULL;
}
- GST_MINI_OBJECT_CLASS (buffer_pool_parent_class)->finalize (GST_MINI_OBJECT
- (pool));
+ buffer_pool_parent_class->finalize (object);
}
static void
static void
gst_v4l2_buffer_pool_class_init (gpointer g_class, gpointer class_data)
{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
+ GObjectClass *object_class = G_OBJECT_CLASS (g_class);
buffer_pool_parent_class = g_type_class_peek_parent (g_class);
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_v4l2_buffer_pool_finalize;
+ object_class->finalize = gst_v4l2_buffer_pool_finalize;
}
GType
if (G_UNLIKELY (_gst_v4l2_buffer_pool_type == 0)) {
static const GTypeInfo v4l2_buffer_pool_info = {
- sizeof (GstMiniObjectClass),
+ sizeof (GObjectClass),
NULL,
NULL,
gst_v4l2_buffer_pool_class_init,
(GInstanceInitFunc) gst_v4l2_buffer_pool_init,
NULL
};
- _gst_v4l2_buffer_pool_type = g_type_register_static (GST_TYPE_MINI_OBJECT,
+ _gst_v4l2_buffer_pool_type = g_type_register_static (G_TYPE_OBJECT,
"GstV4l2BufferPool", &v4l2_buffer_pool_info, 0);
}
return _gst_v4l2_buffer_pool_type;
gint n;
struct v4l2_requestbuffers breq;
- pool = (GstV4l2BufferPool *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER_POOL);
+ pool = (GstV4l2BufferPool *) g_object_new (GST_TYPE_V4L2_BUFFER_POOL, NULL);
pool->video_fd = v4l2_dup (fd);
if (pool->video_fd < 0)
pool->requeuebuf = requeuebuf;
pool->type = type;
pool->buffer_count = num_buffers;
- pool->buffers = g_new0 (GstV4l2Buffer *, num_buffers);
+ pool->buffers = g_new0 (GstBuffer *, num_buffers);
pool->avail_buffers = g_async_queue_new ();
/* now, map the buffers: */
{
gint errnosave = errno;
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
+ g_object_unref (pool);
errno = errnosave;
gst_buffer_unref (buf);
}
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
+ g_object_unref (pool);
}
/**
*
* Get an available buffer in the pool
*/
-GstV4l2Buffer *
+GstBuffer *
gst_v4l2_buffer_pool_get (GstV4l2BufferPool * pool, gboolean blocking)
{
- GstV4l2Buffer *buf;
+ GstBuffer *buf;
if (blocking) {
buf = g_async_queue_pop (pool->avail_buffers);
}
if (buf) {
+ GstMetaV4l2 *meta = GST_META_V4L2_GET (buf);
+
GST_V4L2_BUFFER_POOL_LOCK (pool);
- GST_BUFFER_SIZE (buf) = buf->vbuffer.length;
+ GST_BUFFER_SIZE (buf) = meta->vbuffer.length;
GST_BUFFER_FLAG_UNSET (buf, 0xffffffff);
GST_V4L2_BUFFER_POOL_UNLOCK (pool);
}
* Returns: %TRUE for success
*/
gboolean
-gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstV4l2Buffer * buf)
+gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstBuffer * buf)
{
- GST_LOG_OBJECT (pool->v4l2elem, "enqueue pool buffer %d", buf->vbuffer.index);
+ GstMetaV4l2 *meta;
- if (v4l2_ioctl (pool->video_fd, VIDIOC_QBUF, &buf->vbuffer) < 0)
+ meta = GST_META_V4L2_GET (buf);
+
+ GST_LOG_OBJECT (pool->v4l2elem, "enqueue pool buffer %d",
+ meta->vbuffer.index);
+
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_QBUF, &meta->vbuffer) < 0)
return FALSE;
pool->num_live_buffers--;
*
* Returns: a buffer
*/
-GstV4l2Buffer *
+GstBuffer *
gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool)
{
GstV4l2Object *v4l2object = get_v4l2_object (pool->v4l2elem);
- GstV4l2Buffer *pool_buffer;
+ GstBuffer *pool_buffer;
struct v4l2_buffer buffer;
memset (&buffer, 0x00, sizeof (buffer));
buffer.type = pool->type;
buffer.memory = V4L2_MEMORY_MMAP;
-
if (v4l2_ioctl (pool->video_fd, VIDIOC_DQBUF, &buffer) >= 0) {
GST_V4L2_BUFFER_POOL_LOCK (pool);
typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
-typedef struct _GstV4l2Buffer GstV4l2Buffer;
+typedef struct _GstMetaV4l2 GstMetaV4l2;
struct _GstV4l2BufferPool
{
- GstMiniObject parent;
+ GObject parent;
GstElement *v4l2elem; /* the v4l2 src/sink that owns us.. maybe we should be owned by v4l2object? */
gboolean requeuebuf; /* if true, unusued buffers are automatically re-QBUF'd */
GAsyncQueue* avail_buffers;/* pool of available buffers, not with the driver and which aren't held outside the bufferpool */
gint video_fd; /* a dup(2) of the v4l2object's video_fd */
guint buffer_count;
- GstV4l2Buffer **buffers;
+ GstBuffer **buffers;
};
-struct _GstV4l2Buffer {
- GstBuffer buffer;
+struct _GstMetaV4l2 {
+ GstMeta meta;
struct v4l2_buffer vbuffer;
GstV4l2BufferPool *pool;
};
+const GstMetaInfo * gst_meta_v4l2_get_info (void);
+#define GST_META_V4L2_GET(buf) ((GstMetaV4l2 *)gst_buffer_get_meta(buf,gst_meta_v4l2_get_info()))
+#define GST_META_V4L2_ADD(buf) ((GstMetaV4l2 *)gst_buffer_add_meta(buf,gst_meta_v4l2_get_info(),NULL))
+
void gst_v4l2_buffer_pool_destroy (GstV4l2BufferPool * pool);
GstV4l2BufferPool *gst_v4l2_buffer_pool_new (GstElement *v4l2elem, gint fd, gint num_buffers, GstCaps * caps, gboolean requeuebuf, enum v4l2_buf_type type);
-GstV4l2Buffer *gst_v4l2_buffer_pool_get (GstV4l2BufferPool *pool, gboolean blocking);
-gboolean gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool *pool, GstV4l2Buffer *buf);
-GstV4l2Buffer *gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool *pool);
+GstBuffer *gst_v4l2_buffer_pool_get (GstV4l2BufferPool *pool, gboolean blocking);
+gboolean gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool *pool, GstBuffer *buf);
+GstBuffer *gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool *pool);
gint gst_v4l2_buffer_pool_available_buffers (GstV4l2BufferPool *pool);
V4L2_STD_OBJECT_PROPS,
};
+G_LOCK_DEFINE_STATIC (probe_lock);
+
const GList *
gst_v4l2_probe_get_properties (GstPropertyProbe * probe)
{
GObjectClass *klass = G_OBJECT_GET_CLASS (probe);
static GList *list = NULL;
- /* well, not perfect, but better than no locking at all.
- * In the worst case we leak a list node, so who cares? */
- GST_CLASS_LOCK (GST_OBJECT_CLASS (klass));
+ G_LOCK (probe_lock);
if (!list) {
list = g_list_append (NULL, g_object_class_find_property (klass, "device"));
}
- GST_CLASS_UNLOCK (GST_OBJECT_CLASS (klass));
+ G_UNLOCK (probe_lock);
return list;
}
g_assert (iface_type == GST_TYPE_X_OVERLAY ||
iface_type == GST_TYPE_NAVIGATION ||
iface_type == GST_TYPE_COLOR_BALANCE ||
- iface_type == GST_TYPE_VIDEO_ORIENTATION ||
- iface_type == GST_TYPE_TUNER);
+ iface_type == GST_TYPE_VIDEO_ORIENTATION || iface_type == GST_TYPE_TUNER);
#else
g_assert (iface_type == GST_TYPE_COLOR_BALANCE ||
- iface_type == GST_TYPE_VIDEO_ORIENTATION ||
- iface_type == GST_TYPE_TUNER);
+ iface_type == GST_TYPE_VIDEO_ORIENTATION || iface_type == GST_TYPE_TUNER);
#endif
if (v4l2object->video_fd == -1)
gst_v4l2src_buffer_pool_activate (GstV4l2BufferPool * pool,
GstV4l2Src * v4l2src)
{
- GstV4l2Buffer *buf;
+ GstBuffer *buf;
while ((buf = gst_v4l2_buffer_pool_get (pool, FALSE)) != NULL)
if (!gst_v4l2_buffer_pool_qbuf (pool, buf))
(_("Could not enqueue buffers in device '%s'."),
v4l2src->v4l2object->videodev),
("enqueing buffer %d/%d failed: %s",
- buf->vbuffer.index, v4l2src->num_buffers, g_strerror (errno)));
+ GST_META_V4L2_GET (buf)->vbuffer.index, v4l2src->num_buffers,
+ g_strerror (errno)));
return FALSE;
}
}
/* Called when a buffer is returned from the pipeline */
static void
-gst_ximage_src_return_buf (GstXImageSrc * ximagesrc,
- GstXImageSrcBuffer * ximage)
+gst_ximage_src_return_buf (GstXImageSrc * ximagesrc, GstBuffer * ximage)
{
+ GstMetaXImage *meta = GST_META_XIMAGE_GET (ximage);
+
/* If our geometry changed we can't reuse that image. */
- if ((ximage->width != ximagesrc->width) ||
- (ximage->height != ximagesrc->height)) {
+ if ((meta->width != ximagesrc->width) || (meta->height != ximagesrc->height)) {
GST_DEBUG_OBJECT (ximagesrc,
"destroy image %p as its size changed %dx%d vs current %dx%d",
- ximage, ximage->width, ximage->height,
- ximagesrc->width, ximagesrc->height);
+ ximage, meta->width, meta->height, ximagesrc->width, ximagesrc->height);
g_mutex_lock (ximagesrc->x_lock);
gst_ximageutil_ximage_destroy (ximagesrc->xcontext, ximage);
g_mutex_unlock (ximagesrc->x_lock);
/* In that case we can reuse the image and add it to our image pool. */
GST_LOG_OBJECT (ximagesrc, "recycling image %p in pool", ximage);
/* need to increment the refcount again to recycle */
- gst_buffer_ref (GST_BUFFER (ximage));
+ gst_buffer_ref (ximage);
g_mutex_lock (ximagesrc->pool_lock);
ximagesrc->buffer_pool = g_slist_prepend (ximagesrc->buffer_pool, ximage);
g_mutex_unlock (ximagesrc->pool_lock);
/* Retrieve an XImageSrcBuffer, preferably from our
* pool of existing images and populate it from the window */
-static GstXImageSrcBuffer *
+static GstBuffer *
gst_ximage_src_ximage_get (GstXImageSrc * ximagesrc)
{
- GstXImageSrcBuffer *ximage = NULL;
+ GstBuffer *ximage = NULL;
+ GstMetaXImage *meta;
g_mutex_lock (ximagesrc->pool_lock);
while (ximagesrc->buffer_pool != NULL) {
ximage = ximagesrc->buffer_pool->data;
- if ((ximage->width != ximagesrc->width) ||
- (ximage->height != ximagesrc->height)) {
+ meta = GST_META_XIMAGE_GET (ximage);
+
+ if ((meta->width != ximagesrc->width) ||
+ (meta->height != ximagesrc->height)) {
gst_ximage_buffer_free (ximage);
}
gst_value_get_fraction_numerator (xcontext->par),
gst_value_get_fraction_denominator (xcontext->par), NULL);
- gst_buffer_set_caps (GST_BUFFER (ximage), caps);
+ gst_buffer_set_caps (ximage, caps);
g_mutex_unlock (ximagesrc->x_lock);
gst_caps_unref (caps);
}
g_return_val_if_fail (GST_IS_XIMAGE_SRC (ximagesrc), NULL);
+
+ meta = GST_META_XIMAGE_GET (ximage);
+
#ifdef HAVE_XDAMAGE
if (ximagesrc->have_xdamage && ximagesrc->use_damage &&
ximagesrc->last_ximage != NULL) {
startx, starty, width, height);
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
startx, starty, width, height, AllPlanes, ZPixmap,
- ximage->ximage, startx - ximagesrc->startx,
+ meta->ximage, startx - ximagesrc->startx,
starty - ximagesrc->starty);
}
} else {
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
rects[i].x, rects[i].y,
rects[i].width, rects[i].height,
- AllPlanes, ZPixmap, ximage->ximage, rects[i].x, rects[i].y);
+ AllPlanes, ZPixmap, meta->ximage, rects[i].x, rects[i].y);
}
}
free (rects);
GST_DEBUG_OBJECT (ximagesrc, "Removing cursor from %d,%d", x, y);
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
startx, starty, iwidth, iheight, AllPlanes, ZPixmap,
- ximage->ximage, startx - ximagesrc->startx,
+ meta->ximage, startx - ximagesrc->startx,
starty - ximagesrc->starty);
}
} else {
GST_DEBUG_OBJECT (ximagesrc, "Removing cursor from %d,%d", x, y);
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
- x, y, width, height, AllPlanes, ZPixmap, ximage->ximage, x, y);
+ x, y, width, height, AllPlanes, ZPixmap, meta->ximage, x, y);
}
}
#endif
if (ximagesrc->xcontext->use_xshm) {
GST_DEBUG_OBJECT (ximagesrc, "Retrieving screen using XShm");
XShmGetImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
- ximage->ximage, ximagesrc->startx, ximagesrc->starty, AllPlanes);
+ meta->ximage, ximagesrc->startx, ximagesrc->starty, AllPlanes);
} else
#endif /* HAVE_XSHM */
if (ximagesrc->remote) {
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
ximagesrc->startx, ximagesrc->starty, ximagesrc->width,
- ximagesrc->height, AllPlanes, ZPixmap, ximage->ximage, 0, 0);
+ ximagesrc->height, AllPlanes, ZPixmap, meta->ximage, 0, 0);
} else {
- ximage->ximage =
+ meta->ximage =
XGetImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
ximagesrc->startx, ximagesrc->starty, ximagesrc->width,
ximagesrc->height, AllPlanes, ZPixmap);
(guint8 *) & (ximagesrc->cursor_image->pixels[((j -
cy) * ximagesrc->cursor_image->width + (i - cx))]);
dest =
- (guint8 *) & (ximage->ximage->data[((j -
+ (guint8 *) & (meta->ximage->data[((j -
ximagesrc->starty) * ximagesrc->width + (i -
ximagesrc->startx)) * (ximagesrc->xcontext->bpp /
8)]);
gst_ximage_src_create (GstPushSrc * bs, GstBuffer ** buf)
{
GstXImageSrc *s = GST_XIMAGE_SRC (bs);
- GstXImageSrcBuffer *image;
+ GstBuffer *image;
GstClockTime base_time;
GstClockTime next_capture_ts;
GstClockTime dur;
{
g_mutex_lock (ximagesrc->pool_lock);
while (ximagesrc->buffer_pool != NULL) {
- GstXImageSrcBuffer *ximage = ximagesrc->buffer_pool->data;
+ GstBuffer *ximage = ximagesrc->buffer_pool->data;
gst_ximage_buffer_free (ximage);
int damage_event_base;
XserverRegion damage_region;
GC damage_copy_gc;
- GstXImageSrcBuffer *last_ximage;
+ GstBuffer *last_ximage;
#endif
};
#include "ximageutil.h"
+const GstMetaInfo *
+gst_meta_ximage_get_info (void)
+{
+ static const GstMetaInfo *meta_ximage_info = NULL;
+
+ if (meta_ximage_info == NULL) {
+ meta_ximage_info =
+ gst_meta_register ("GstMetaXImageSrc", "GstMetaXImageSrc",
+ sizeof (GstMetaXImage), (GstMetaInitFunction) NULL,
+ (GstMetaFreeFunction) NULL, (GstMetaTransformFunction) NULL,
+ (GstMetaSerializeFunction) NULL, (GstMetaDeserializeFunction) NULL);
+ }
+ return meta_ximage_info;
+}
+
#ifdef HAVE_XSHM
static gboolean error_caught = FALSE;
gst_value_get_fraction_denominator (xcontext->par));
}
-static GstBufferClass *ximagesrc_buffer_parent_class = NULL;
-
+/* Dispose hook installed on every ximage buffer by
+ * gst_ximageutil_ximage_new(): looks up the buffer's GstMetaXImage and
+ * notifies the owning element through the recorded return_func.
+ * With no parent set it only warns and falls through. */
static void
-gst_ximagesrc_buffer_finalize (GstXImageSrcBuffer * ximage)
+gst_ximagesrc_buffer_dispose (GstBuffer * ximage)
{
GstElement *parent;
+  GstMetaXImage *meta;
g_return_if_fail (ximage != NULL);
-  parent = ximage->parent;
+  meta = GST_META_XIMAGE_GET (ximage);
+
+  parent = meta->parent;
if (parent == NULL) {
g_warning ("XImageSrcBuffer->ximagesrc == NULL");
goto beach;
}
-  if (ximage->return_func)
-    ximage->return_func (parent, ximage);
+  if (meta->return_func)
+    meta->return_func (parent, ximage);
beach:
-
-  GST_MINI_OBJECT_CLASS (ximagesrc_buffer_parent_class)->finalize
-    (GST_MINI_OBJECT (ximage));
-
return;
}
+/* Force-free an ximage buffer: poison the cached geometry in the meta so
+ * the element's buffer pool cannot recycle it, then drop the ref.
+ * The old GstXImageSrcBuffer GType boilerplate below is no longer needed
+ * because plain GstBuffers carry the state as a GstMetaXImage. */
void
-gst_ximage_buffer_free (GstXImageSrcBuffer * ximage)
+gst_ximage_buffer_free (GstBuffer * ximage)
{
-  /* make sure it is not recycled */
-  ximage->width = -1;
-  ximage->height = -1;
-  gst_buffer_unref (GST_BUFFER (ximage));
-}
+  GstMetaXImage *meta;
-static void
-gst_ximagesrc_buffer_init (GstXImageSrcBuffer * ximage_buffer, gpointer g_class)
-{
-#ifdef HAVE_XSHM
-  ximage_buffer->SHMInfo.shmaddr = ((void *) -1);
-  ximage_buffer->SHMInfo.shmid = -1;
-#endif
-}
-
-static void
-gst_ximagesrc_buffer_class_init (gpointer g_class, gpointer class_data)
-{
-  GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
-  ximagesrc_buffer_parent_class = g_type_class_peek_parent (g_class);
-
-  mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
-      gst_ximagesrc_buffer_finalize;
-}
+  meta = GST_META_XIMAGE_GET (ximage);
-static GType
-gst_ximagesrc_buffer_get_type (void)
-{
-  static GType _gst_ximagesrc_buffer_type;
-
-  if (G_UNLIKELY (_gst_ximagesrc_buffer_type == 0)) {
-    static const GTypeInfo ximagesrc_buffer_info = {
-      sizeof (GstBufferClass),
-      NULL,
-      NULL,
-      gst_ximagesrc_buffer_class_init,
-      NULL,
-      NULL,
-      sizeof (GstXImageSrcBuffer),
-      0,
-      (GInstanceInitFunc) gst_ximagesrc_buffer_init,
-      NULL
-    };
-    _gst_ximagesrc_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
-        "GstXImageSrcBuffer", &ximagesrc_buffer_info, 0);
-  }
-  return _gst_ximagesrc_buffer_type;
+  /* make sure it is not recycled */
+  meta->width = -1;
+  meta->height = -1;
+  gst_buffer_unref (ximage);
}
/* This function handles GstXImageSrcBuffer creation depending on XShm availability */
-GstXImageSrcBuffer *
+GstBuffer *
gst_ximageutil_ximage_new (GstXContext * xcontext,
GstElement * parent, int width, int height, BufferReturnFunc return_func)
{
-  GstXImageSrcBuffer *ximage = NULL;
+  GstBuffer *ximage = NULL;
+  GstMetaXImage *meta;
gboolean succeeded = FALSE;
-  ximage =
-      (GstXImageSrcBuffer *) gst_mini_object_new (GST_TYPE_XIMAGESRC_BUFFER);
+  ximage = gst_buffer_new ();
+  /* dispose hands the buffer back to "parent" through return_func
+   * instead of letting it be freed (see gst_ximagesrc_buffer_dispose) */
+  GST_MINI_OBJECT_CAST (ximage)->dispose =
+      (GstMiniObjectDisposeFunction) gst_ximagesrc_buffer_dispose;
-  ximage->width = width;
-  ximage->height = height;
+  meta = GST_META_XIMAGE_ADD (ximage);
+  meta->width = width;
+  meta->height = height;
#ifdef HAVE_XSHM
+  /* (void *) -1 / -1 mark "no SHM segment attached yet"; the cleanup
+   * path in gst_ximageutil_ximage_destroy checks for these sentinels */
+  meta->SHMInfo.shmaddr = ((void *) -1);
+  meta->SHMInfo.shmid = -1;
+
if (xcontext->use_xshm) {
-    ximage->ximage = XShmCreateImage (xcontext->disp,
+    meta->ximage = XShmCreateImage (xcontext->disp,
xcontext->visual, xcontext->depth,
-        ZPixmap, NULL, &ximage->SHMInfo, ximage->width, ximage->height);
+        ZPixmap, NULL, &meta->SHMInfo, meta->width, meta->height);
-    if (!ximage->ximage) {
+    if (!meta->ximage) {
GST_WARNING_OBJECT (parent,
-        "could not XShmCreateImage a %dx%d image",
-        ximage->width, ximage->height);
+        "could not XShmCreateImage a %dx%d image", meta->width, meta->height);
/* Retry without XShm */
xcontext->use_xshm = FALSE;
}
/* we have to use the returned bytes_per_line for our shm size */
-    ximage->size = ximage->ximage->bytes_per_line * ximage->ximage->height;
-    ximage->SHMInfo.shmid = shmget (IPC_PRIVATE, ximage->size,
-        IPC_CREAT | 0777);
-    if (ximage->SHMInfo.shmid == -1)
+    meta->size = meta->ximage->bytes_per_line * meta->ximage->height;
+    meta->SHMInfo.shmid = shmget (IPC_PRIVATE, meta->size, IPC_CREAT | 0777);
+    if (meta->SHMInfo.shmid == -1)
goto beach;
-    ximage->SHMInfo.shmaddr = shmat (ximage->SHMInfo.shmid, 0, 0);
-    if (ximage->SHMInfo.shmaddr == ((void *) -1))
+    meta->SHMInfo.shmaddr = shmat (meta->SHMInfo.shmid, 0, 0);
+    if (meta->SHMInfo.shmaddr == ((void *) -1))
goto beach;
/* Delete the SHM segment. It will actually go away automatically
* when we detach now */
-    shmctl (ximage->SHMInfo.shmid, IPC_RMID, 0);
+    shmctl (meta->SHMInfo.shmid, IPC_RMID, 0);
-    ximage->ximage->data = ximage->SHMInfo.shmaddr;
-    ximage->SHMInfo.readOnly = FALSE;
+    meta->ximage->data = meta->SHMInfo.shmaddr;
+    meta->SHMInfo.readOnly = FALSE;
-    if (XShmAttach (xcontext->disp, &ximage->SHMInfo) == 0)
+    if (XShmAttach (xcontext->disp, &meta->SHMInfo) == 0)
goto beach;
XSync (xcontext->disp, FALSE);
no_xshm:
#endif /* HAVE_XSHM */
{
-    ximage->ximage = XCreateImage (xcontext->disp,
+    meta->ximage = XCreateImage (xcontext->disp,
xcontext->visual,
xcontext->depth,
-        ZPixmap, 0, NULL, ximage->width, ximage->height, xcontext->bpp, 0);
-    if (!ximage->ximage)
+        ZPixmap, 0, NULL, meta->width, meta->height, xcontext->bpp, 0);
+    if (!meta->ximage)
goto beach;
/* we have to use the returned bytes_per_line for our image size */
-    ximage->size = ximage->ximage->bytes_per_line * ximage->ximage->height;
-    ximage->ximage->data = g_malloc (ximage->size);
+    meta->size = meta->ximage->bytes_per_line * meta->ximage->height;
+    meta->ximage->data = g_malloc (meta->size);
XSync (xcontext->disp, FALSE);
}
succeeded = TRUE;
-  GST_BUFFER_DATA (ximage) = (guchar *) ximage->ximage->data;
-  GST_BUFFER_SIZE (ximage) = ximage->size;
+  GST_BUFFER_DATA (ximage) = (guchar *) meta->ximage->data;
+  GST_BUFFER_SIZE (ximage) = meta->size;
/* Keep a ref to our src */
-  ximage->parent = gst_object_ref (parent);
-  ximage->return_func = return_func;
+  meta->parent = gst_object_ref (parent);
+  meta->return_func = return_func;
beach:
if (!succeeded) {
gst_ximage_buffer_free (ximage);
/* This function destroys a GstXImageBuffer handling XShm availability */
void
-gst_ximageutil_ximage_destroy (GstXContext * xcontext,
-    GstXImageSrcBuffer * ximage)
+gst_ximageutil_ximage_destroy (GstXContext * xcontext, GstBuffer * ximage)
{
+  GstMetaXImage *meta;
+
+  meta = GST_META_XIMAGE_GET (ximage);
+
+  /* NOTE(review): assumes the meta is present — true for buffers created by
+   * gst_ximageutil_ximage_new(); a buffer without it would make the
+   * dereferences below crash. Confirm no other buffers reach here. */
/* We might have some buffers destroyed after changing state to NULL */
if (!xcontext)
goto beach;
#ifdef HAVE_XSHM
if (xcontext->use_xshm) {
-    if (ximage->SHMInfo.shmaddr != ((void *) -1)) {
-      XShmDetach (xcontext->disp, &ximage->SHMInfo);
+    if (meta->SHMInfo.shmaddr != ((void *) -1)) {
+      XShmDetach (xcontext->disp, &meta->SHMInfo);
XSync (xcontext->disp, 0);
-      shmdt (ximage->SHMInfo.shmaddr);
+      shmdt (meta->SHMInfo.shmaddr);
}
-    if (ximage->ximage)
-      XDestroyImage (ximage->ximage);
+    if (meta->ximage)
+      XDestroyImage (meta->ximage);
} else
#endif /* HAVE_XSHM */
{
-    if (ximage->ximage) {
-      XDestroyImage (ximage->ximage);
+    if (meta->ximage) {
+      XDestroyImage (meta->ximage);
}
}
XSync (xcontext->disp, FALSE);
beach:
-  if (ximage->parent) {
+  if (meta->parent) {
/* Release the ref to our parent */
-    gst_object_unref (ximage->parent);
-    ximage->parent = NULL;
+    gst_object_unref (meta->parent);
+    meta->parent = NULL;
}
return;
typedef struct _GstXContext GstXContext;
typedef struct _GstXWindow GstXWindow;
typedef struct _GstXImage GstXImage;
-typedef struct _GstXImageSrcBuffer GstXImageSrcBuffer;
+typedef struct _GstMetaXImage GstMetaXImage;
/* Global X Context stuff */
/**
/* custom ximagesrc buffer, copied from ximagesink */
/* BufferReturnFunc is called when a buffer is finalised */
-typedef void (*BufferReturnFunc) (GstElement *parent, GstXImageSrcBuffer *buf);
+typedef void (*BufferReturnFunc) (GstElement *parent, GstBuffer *buf);
/**
- * GstXImageSrcBuffer:
+ * GstMetaXImage:
* @parent: a reference to the element we belong to
* @ximage: the XImage of this buffer
* @width: the width in pixels of XImage @ximage
* @height: the height in pixels of XImage @ximage
* @size: the size in bytes of XImage @ximage
*
- * Subclass of #GstBuffer containing additional information about an XImage.
+ * Extra data attached to buffers containing additional information about an XImage.
*/
-struct _GstXImageSrcBuffer {
-  GstBuffer buffer;
+struct _GstMetaXImage {
+  GstMeta meta;
/* Reference to the ximagesrc we belong to */
GstElement *parent;
gint width, height;
size_t size;
-
+
BufferReturnFunc return_func;
};
+/* NOTE(review): GET looks the meta up on an existing buffer and yields NULL
+ * when it is absent; ADD attaches a fresh, zero-initialized meta. Callers in
+ * ximageutil.c dereference GET's result without a NULL check. */
+const GstMetaInfo * gst_meta_ximage_get_info (void);
+#define GST_META_XIMAGE_GET(buf) ((GstMetaXImage *)gst_buffer_get_meta(buf,gst_meta_ximage_get_info()))
+#define GST_META_XIMAGE_ADD(buf) ((GstMetaXImage *)gst_buffer_add_meta(buf,gst_meta_ximage_get_info(),NULL))
-GstXImageSrcBuffer *gst_ximageutil_ximage_new (GstXContext *xcontext,
+GstBuffer *gst_ximageutil_ximage_new (GstXContext *xcontext,
GstElement *parent, int width, int height, BufferReturnFunc return_func);
void gst_ximageutil_ximage_destroy (GstXContext *xcontext,
-    GstXImageSrcBuffer * ximage);
+    GstBuffer * ximage);
/* Call to manually release a buffer */
-void gst_ximage_buffer_free (GstXImageSrcBuffer *ximage);
-
-#define GST_TYPE_XIMAGESRC_BUFFER (gst_ximagesrc_buffer_get_type())
-#define GST_IS_XIMAGESRC_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_XIMAGESRC_BUFFER))
-#define GST_IS_XIMAGESRC_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_XIMAGESRC_BUFFER))
-#define GST_XIMAGESRC_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_XIMAGESRC_BUFFER, GstXImageSrcBuffer))
-#define GST_XIMAGESRC_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_XIMAGESRC_BUFFER, GstXImageSrcBufferClass))
-#define GST_XIMAGESRC_BUFFER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_XIMAGESRC_BUFFER, GstXImageSrcBufferClass))
+void gst_ximage_buffer_free (GstBuffer *ximage);
G_END_DECLS
GST_PLUGIN_PATH=$(top_builddir)/gst:$(top_builddir)/ext:$(top_builddir)/sys:$(GSTPB_PLUGINS_DIR):$(GST_PLUGINS_DIR) \
GST_PLUGIN_LOADING_WHITELIST="gstreamer@$(GST_PLUGINS_DIR):gst-plugins-base@$(GSTPB_PLUGINS_DIR):gst-plugins-good@$(top_builddir)" \
GST_STATE_IGNORE_ELEMENTS="aasink autoaudiosrc autoaudiosink autovideosrc autovideosink \
- cacasink cairotextoverlay gconfaudiosrc gconfvideosrc gconfaudiosink gconfvideosink \
+ cacasink cairotextoverlay \
halaudiosrc halaudiosink jackaudiosrc jackaudiosink \
osssrc osssink osxaudiosink osxaudiosrc osxvideosrc osxvideosink \
pulsesink pulsesrc pulsemixer v4l2src"
noinst_PROGRAMS = pulse
pulse_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
-pulse_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-0.10 $(GST_BASE_LIBS) $(GST_LIBS)
+# Link against the interfaces lib of the configured series so these test
+# binaries track GST_MAJORMINOR instead of hard-coding 0.10.
+pulse_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(GST_LIBS)
probe_SOURCES = probe.c
probe_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
-probe_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-0.10 $(GST_BASE_LIBS) $(GST_LIBS)
+probe_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(GST_LIBS)
test_oss4_SOURCES = test-oss4.c
test_oss4_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
-test_oss4_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-0.10 $(GST_LIBS)
+test_oss4_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_LIBS)
test_oss4_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
else
OSS4_TESTS=