dnl initialize autoconf
dnl releases only do -Wall, git and prerelease does -Werror too
dnl use a three digit version number for releases, and four for git/pre
-AC_INIT(GStreamer Good Plug-ins, 0.10.29.1,
+AC_INIT(GStreamer Good Plug-ins, 0.11.0.1,
http://bugzilla.gnome.org/enter_bug.cgi?product=GStreamer,
gst-plugins-good)
dnl our libraries and install dirs use major.minor as a version
GST_MAJORMINOR=$PACKAGE_VERSION_MAJOR.$PACKAGE_VERSION_MINOR
dnl we override it here if we need to for the release candidate of new series
-GST_MAJORMINOR=0.10
+GST_MAJORMINOR=0.11
AC_SUBST(GST_MAJORMINOR)
AG_GST_LIBTOOL_PREPARE
AM_PROG_LIBTOOL
dnl *** required versions of GStreamer stuff ***
-GST_REQ=0.10.33
-GSTPB_REQ=0.10.33
+GST_REQ=0.11.0
+GSTPB_REQ=0.11.0
dnl *** autotools stuff ****
-e "s/.* PACKAGE_STRING$/#define PACKAGE_STRING \"$PACKAGE_STRING\"/" \
-e 's/.* PACKAGE_TARNAME$/#define PACKAGE_TARNAME "'$PACKAGE_TARNAME'"/' \
-e 's/.* PACKAGE_VERSION$/#define PACKAGE_VERSION "'$PACKAGE_VERSION'"/' \
- -e 's/.* PLUGINDIR$/#ifdef _DEBUG\n# define PLUGINDIR PREFIX "\\\\debug\\\\lib\\\\gstreamer-0.10"\n#else\n# define PLUGINDIR PREFIX "\\\\lib\\\\gstreamer-0.10"\n#endif/' \
+ -e 's/.* PLUGINDIR$/#ifdef _DEBUG\n# define PLUGINDIR PREFIX "\\\\debug\\\\lib\\\\gstreamer-0.11"\n#else\n# define PLUGINDIR PREFIX "\\\\lib\\\\gstreamer-0.11"\n#endif/' \
-e 's/.* USE_BINARY_REGISTRY$/#define USE_BINARY_REGISTRY/' \
-e 's/.* VERSION$/#define VERSION "'$VERSION'"/' \
-e "s/.* DEFAULT_AUDIOSINK$/#define DEFAULT_AUDIOSINK \"directsoundsink\"/" \
static void gst_flac_dec_error_cb (const FLAC__StreamDecoder *
decoder, FLAC__StreamDecoderErrorStatus status, void *client_data);
-GST_BOILERPLATE (GstFlacDec, gst_flac_dec, GstElement, GST_TYPE_ELEMENT);
+#define gst_flac_dec_parent_class parent_class
+G_DEFINE_TYPE (GstFlacDec, gst_flac_dec, GST_TYPE_ELEMENT);
/* FIXME 0.11: Use width=32 for all depths and let audioconvert
* handle the conversions instead of doing it ourself.
);
static void
-gst_flac_dec_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&flac_dec_src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&flac_dec_sink_factory));
- gst_element_class_set_details_simple (element_class, "FLAC audio decoder",
- "Codec/Decoder/Audio",
- "Decodes FLAC lossless audio streams", "Wim Taymans <wim@fluendo.com>");
-
- GST_DEBUG_CATEGORY_INIT (flacdec_debug, "flacdec", 0, "flac decoder");
-}
-
-static void
gst_flac_dec_class_init (GstFlacDecClass * klass)
{
GstElementClass *gstelement_class;
gstelement_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (flacdec_debug, "flacdec", 0, "flac decoder");
+
gobject_class->finalize = gst_flac_dec_finalize;
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_flac_dec_change_state);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_dec_src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_dec_sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "FLAC audio decoder",
+ "Codec/Decoder/Audio",
+ "Decodes FLAC lossless audio streams", "Wim Taymans <wim@fluendo.com>");
}
static void
-gst_flac_dec_init (GstFlacDec * flacdec, GstFlacDecClass * klass)
+gst_flac_dec_init (GstFlacDec * flacdec)
{
flacdec->sinkpad =
gst_pad_new_from_static_template (&flac_dec_sink_factory, "sink");
while (offset >= MAX (SCANBLOCK_SIZE / 2, file_size / 2)) {
GstFlowReturn flow;
GstBuffer *buf = NULL;
- guint8 *data;
- guint size;
+ guint8 *data, *ptr;
+ gsize size, left;
/* divide by 2 = not very sophisticated way to deal with overlapping */
offset -= SCANBLOCK_SIZE / 2;
return;
}
- size = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+
+ ptr = data;
+ left = size;
- while (size > 16) {
- if (gst_flac_dec_scan_got_frame (flacdec, data, size, samples)) {
+ while (left > 16) {
+ if (gst_flac_dec_scan_got_frame (flacdec, ptr, left, samples)) {
GST_DEBUG_OBJECT (flacdec, "frame sync at offset %" G_GINT64_FORMAT,
- offset + GST_BUFFER_SIZE (buf) - size);
+ offset + size - left);
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return;
}
- ++data;
- --size;
+ ++ptr;
+ --left;
}
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
}
}
return FLAC__STREAM_DECODER_READ_STATUS_ABORT;
}
+ *bytes = gst_buffer_get_size (buf);
GST_DEBUG_OBJECT (flacdec, "Read %d bytes at %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (buf), flacdec->offset);
- memcpy (buffer, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
- *bytes = GST_BUFFER_SIZE (buf);
+ *bytes, flacdec->offset);
+
+ gst_buffer_extract (buf, 0, buffer, *bytes);
gst_buffer_unref (buf);
flacdec->offset += *bytes;
guint samples = frame->header.blocksize;
guint j, i;
GstClockTime next;
+ gpointer data;
+ gsize size;
GST_LOG_OBJECT (flacdec, "samples in frame header: %d", samples);
* downstream negotiation work on older basetransform */
ret = gst_pad_alloc_buffer_and_set_caps (flacdec->srcpad,
GST_BUFFER_OFFSET (flacdec->pending),
- GST_BUFFER_SIZE (flacdec->pending),
+      (guint) gst_buffer_get_size (flacdec->pending),
GST_BUFFER_CAPS (flacdec->pending), &outbuf);
if (ret == GST_FLOW_OK) {
gst_pad_push (flacdec->srcpad, flacdec->pending);
GST_BUFFER_DURATION (outbuf) = next - GST_BUFFER_TIMESTAMP (outbuf);
+ data = gst_buffer_map (outbuf, &size, NULL, GST_MAP_WRITE);
if (width == 8) {
- gint8 *outbuffer = (gint8 *) GST_BUFFER_DATA (outbuf);
+ gint8 *outbuffer = (gint8 *) data;
for (i = 0; i < samples; i++) {
for (j = 0; j < channels; j++) {
}
}
} else if (width == 16) {
- gint16 *outbuffer = (gint16 *) GST_BUFFER_DATA (outbuf);
+ gint16 *outbuffer = (gint16 *) data;
for (i = 0; i < samples; i++) {
for (j = 0; j < channels; j++) {
}
}
} else if (width == 32) {
- gint32 *outbuffer = (gint32 *) GST_BUFFER_DATA (outbuf);
+ gint32 *outbuffer = (gint32 *) data;
for (i = 0; i < samples; i++) {
for (j = 0; j < channels; j++) {
} else {
g_assert_not_reached ();
}
+ gst_buffer_unmap (outbuf, data, size);
if (!flacdec->seeking) {
GST_DEBUG_OBJECT (flacdec, "pushing %d samples at offset %" G_GINT64_FORMAT
if (flacdec->discont) {
GST_DEBUG_OBJECT (flacdec, "marking discont");
- outbuf = gst_buffer_make_metadata_writable (outbuf);
+ outbuf = gst_buffer_make_writable (outbuf);
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
flacdec->discont = FALSE;
}
GST_LOG_OBJECT (dec, "buffer with ts=%" GST_TIME_FORMAT ", end_offset=%"
G_GINT64_FORMAT ", size=%u", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
- GST_BUFFER_OFFSET_END (buf), GST_BUFFER_SIZE (buf));
+      GST_BUFFER_OFFSET_END (buf), (guint) gst_buffer_get_size (buf));
if (dec->init) {
GST_DEBUG_OBJECT (dec, "initializing decoder");
if (dec->framed) {
gint64 unused;
+ guint8 *data;
+ gsize size;
/* check if this is a flac audio frame (rather than a header or junk) */
- got_audio_frame = gst_flac_dec_scan_got_frame (dec, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf), &unused);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ got_audio_frame = gst_flac_dec_scan_got_frame (dec, data, size, &unused);
+ gst_buffer_unmap (buf, data, size);
/* oggdemux will set granulepos in OFFSET_END instead of timestamp */
if (G_LIKELY (got_audio_frame)) {
GST_DEBUG_CATEGORY_STATIC (flacenc_debug);
#define GST_CAT_DEFAULT flacenc_debug
-
-#define _do_init(type) \
- G_STMT_START{ \
- static const GInterfaceInfo tag_setter_info = { \
- NULL, \
- NULL, \
- NULL \
- }; \
- static const GInterfaceInfo preset_info = { \
- NULL, \
- NULL, \
- NULL \
- }; \
- g_type_add_interface_static (type, GST_TYPE_TAG_SETTER, \
- &tag_setter_info); \
- g_type_add_interface_static (type, GST_TYPE_PRESET, \
- &preset_info); \
- }G_STMT_END
-
-GST_BOILERPLATE_FULL (GstFlacEnc, gst_flac_enc, GstElement, GST_TYPE_ELEMENT,
- _do_init);
+#define gst_flac_enc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstFlacEnc, gst_flac_enc, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL));
static void gst_flac_enc_finalize (GObject * object);
}
static void
-gst_flac_enc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_factory));
-
- gst_element_class_set_details_simple (element_class, "FLAC audio encoder",
- "Codec/Encoder/Audio",
- "Encodes audio with the FLAC lossless audio encoder",
- "Wim Taymans <wim.taymans@chello.be>");
-
- GST_DEBUG_CATEGORY_INIT (flacenc_debug, "flacenc", 0,
- "Flac encoding element");
-}
-
-static void
gst_flac_enc_class_init (GstFlacEncClass * klass)
{
GObjectClass *gobject_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (flacenc_debug, "flacenc", 0,
+ "Flac encoding element");
+
gobject_class->set_property = gst_flac_enc_set_property;
gobject_class->get_property = gst_flac_enc_get_property;
gobject_class->finalize = gst_flac_enc_finalize;
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
gstelement_class->change_state = gst_flac_enc_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "FLAC audio encoder",
+ "Codec/Encoder/Audio",
+ "Encodes audio with the FLAC lossless audio encoder",
+ "Wim Taymans <wim.taymans@chello.be>");
}
static void
-gst_flac_enc_init (GstFlacEnc * flacenc, GstFlacEncClass * klass)
+gst_flac_enc_init (GstFlacEnc * flacenc)
{
flacenc->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_chain_function (flacenc->sinkpad,
GstStructure *structure;
GstTagImageType image_type = GST_TAG_IMAGE_TYPE_NONE;
gint i;
+ guint8 *data;
+ gsize size;
for (i = 0; i < n_images + n_preview_images; i++) {
if (i < n_images) {
else
image_type = image_type + 2;
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
FLAC__metadata_object_picture_set_data (flacenc->meta[entries],
- GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), TRUE);
+ data, size, TRUE);
+ gst_buffer_unmap (buffer, data, size);
+
/* FIXME: There's no way to set the picture type in libFLAC */
flacenc->meta[entries]->data.picture.type = image_type;
FLAC__metadata_object_picture_set_mime_type (flacenc->meta[entries],
"rate", G_TYPE_INT, enc->sample_rate, NULL);
for (l = enc->headers; l != NULL; l = l->next) {
- const guint8 *data;
- guint size;
+ GstBuffer *buf;
+ guint8 *data;
+ gsize size;
/* mark buffers so oggmux will ignore them if it already muxed the
* header buffers from the streamheaders field in the caps */
- l->data = gst_buffer_make_metadata_writable (GST_BUFFER (l->data));
- GST_BUFFER_FLAG_SET (GST_BUFFER (l->data), GST_BUFFER_FLAG_IN_CAPS);
+ l->data = gst_buffer_make_writable (GST_BUFFER_CAST (l->data));
- data = GST_BUFFER_DATA (GST_BUFFER_CAST (l->data));
- size = GST_BUFFER_SIZE (GST_BUFFER_CAST (l->data));
+ buf = GST_BUFFER_CAST (l->data);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
+
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/* find initial 4-byte marker which we need to skip later on */
if (size == 4 && memcmp (data, "fLaC", 4) == 0) {
- marker = GST_BUFFER_CAST (l->data);
+ marker = buf;
} else if (size > 1 && (data[0] & 0x7f) == HDR_TYPE_STREAMINFO) {
- streaminfo = GST_BUFFER_CAST (l->data);
+ streaminfo = buf;
} else if (size > 1 && (data[0] & 0x7f) == HDR_TYPE_VORBISCOMMENT) {
- vorbiscomment = GST_BUFFER_CAST (l->data);
+ vorbiscomment = buf;
}
+
+ gst_buffer_unmap (buf, data, size);
}
if (marker == NULL || streaminfo == NULL || vorbiscomment == NULL) {
{
GstBuffer *buf;
guint16 num;
+ guint8 *bdata;
+ gsize bsize, slen;
/* minus one for the marker that is merged with streaminfo here */
num = g_list_length (enc->headers) - 1;
- buf = gst_buffer_new_and_alloc (13 + GST_BUFFER_SIZE (streaminfo));
- GST_BUFFER_DATA (buf)[0] = 0x7f;
- memcpy (GST_BUFFER_DATA (buf) + 1, "FLAC", 4);
- GST_BUFFER_DATA (buf)[5] = 0x01; /* mapping version major */
- GST_BUFFER_DATA (buf)[6] = 0x00; /* mapping version minor */
- GST_BUFFER_DATA (buf)[7] = (num & 0xFF00) >> 8;
- GST_BUFFER_DATA (buf)[8] = (num & 0x00FF) >> 0;
- memcpy (GST_BUFFER_DATA (buf) + 9, "fLaC", 4);
- memcpy (GST_BUFFER_DATA (buf) + 13, GST_BUFFER_DATA (streaminfo),
- GST_BUFFER_SIZE (streaminfo));
+ slen = gst_buffer_get_size (streaminfo);
+ buf = gst_buffer_new_and_alloc (13 + slen);
+
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_WRITE);
+ bdata[0] = 0x7f;
+ memcpy (bdata + 1, "FLAC", 4);
+ bdata[5] = 0x01; /* mapping version major */
+ bdata[6] = 0x00; /* mapping version minor */
+ bdata[7] = (num & 0xFF00) >> 8;
+ bdata[8] = (num & 0x00FF) >> 0;
+ memcpy (bdata + 9, "fLaC", 4);
+ gst_buffer_extract (streaminfo, 0, bdata + 13, slen);
+ gst_buffer_unmap (buf, bdata, bsize);
+
notgst_value_array_append_buffer (&array, buf);
gst_buffer_unref (buf);
}
/* add other headers, if there are any */
for (l = enc->headers; l != NULL; l = l->next) {
- if (GST_BUFFER_CAST (l->data) != marker &&
- GST_BUFFER_CAST (l->data) != streaminfo &&
- GST_BUFFER_CAST (l->data) != vorbiscomment) {
- notgst_value_array_append_buffer (&array, GST_BUFFER_CAST (l->data));
+ GstBuffer *buf = GST_BUFFER_CAST (l->data);
+
+ if (buf != marker && buf != streaminfo && buf != vorbiscomment) {
+ notgst_value_array_append_buffer (&array, buf);
}
}
buf = GST_BUFFER (l->data);
gst_buffer_set_caps (buf, caps);
GST_LOG_OBJECT (enc, "Pushing header buffer, size %u bytes",
- GST_BUFFER_SIZE (buf));
+        (guint) gst_buffer_get_size (buf));
+#if 0
GST_MEMDUMP_OBJECT (enc, "header buffer", GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
+#endif
(void) gst_pad_push (enc->srcpad, buf);
l->data = NULL;
}
return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
outbuf = gst_buffer_new_and_alloc (bytes);
- memcpy (GST_BUFFER_DATA (outbuf), buffer, bytes);
+ gst_buffer_fill (outbuf, 0, buffer, bytes);
if (samples > 0 && flacenc->samples_written != (guint64) - 1) {
guint64 granulepos;
} else if (flacenc->got_headers && samples == 0) {
GST_DEBUG_OBJECT (flacenc, "Fixing up headers at pos=%" G_GUINT64_FORMAT
", size=%u", flacenc->offset, (guint) bytes);
+#if 0
GST_MEMDUMP_OBJECT (flacenc, "Presumed header fragment",
GST_BUFFER_DATA (outbuf), GST_BUFFER_SIZE (outbuf));
+#endif
} else {
GST_LOG ("Pushing buffer: ts=%" GST_TIME_FORMAT ", samples=%u, size=%u, "
"pos=%" G_GUINT64_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
{
GstFlacEnc *flacenc;
FLAC__int32 *data;
- gulong insize;
+ gsize bsize;
gint samples, width;
gulong i;
FLAC__bool res;
+ gpointer bdata;
flacenc = GST_FLAC_ENC (GST_PAD_PARENT (pad));
else
flacenc->next_ts = GST_CLOCK_TIME_NONE;
- insize = GST_BUFFER_SIZE (buffer);
- samples = insize / (width >> 3);
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READ);
+ samples = bsize / (width >> 3);
data = g_malloc (samples * sizeof (FLAC__int32));
if (width == 8) {
- gint8 *indata = (gint8 *) GST_BUFFER_DATA (buffer);
+ gint8 *indata = (gint8 *) bdata;
for (i = 0; i < samples; i++)
data[i] = (FLAC__int32) indata[i];
} else if (width == 16) {
- gint16 *indata = (gint16 *) GST_BUFFER_DATA (buffer);
+ gint16 *indata = (gint16 *) bdata;
for (i = 0; i < samples; i++)
data[i] = (FLAC__int32) indata[i];
} else if (width == 32) {
- gint32 *indata = (gint32 *) GST_BUFFER_DATA (buffer);
+ gint32 *indata = (gint32 *) bdata;
for (i = 0; i < samples; i++)
data[i] = (FLAC__int32) indata[i];
} else {
g_assert_not_reached ();
}
-
+ gst_buffer_unmap (buffer, bdata, bsize);
gst_buffer_unref (buffer);
res = FLAC__stream_encoder_process_interleaved (flacenc->encoder,
static gboolean gst_flac_tag_sink_setcaps (GstPad * pad, GstCaps * caps);
-static void
-gst_flac_tag_setup_interfaces (GType flac_tag_type)
-{
- static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };
-
- g_type_add_interface_static (flac_tag_type, GST_TYPE_TAG_SETTER,
- &tag_setter_info);
-}
+#define gst_flac_tag_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstFlacTag, gst_flac_tag, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
-GST_BOILERPLATE_FULL (GstFlacTag, gst_flac_tag, GstElement, GST_TYPE_ELEMENT,
- gst_flac_tag_setup_interfaces);
-
-static void
-gst_flac_tag_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "FLAC tagger",
- "Formatter/Metadata",
- "Rewrite tags in a FLAC file", "Christophe Fergeau <teuf@gnome.org>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&flac_tag_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&flac_tag_src_template));
-
- GST_DEBUG_CATEGORY_INIT (flactag_debug, "flactag", 0, "flac tag rewriter");
-}
static void
gst_flac_tag_class_init (GstFlacTagClass * klass)
GstElementClass *gstelement_class;
GObjectClass *gobject_class;
+ GST_DEBUG_CATEGORY_INIT (flactag_debug, "flactag", 0, "flac tag rewriter");
+
gstelement_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
-
gobject_class->dispose = gst_flac_tag_dispose;
gstelement_class->change_state = gst_flac_tag_change_state;
+
+ gst_element_class_set_details_simple (gstelement_class, "FLAC tagger",
+ "Formatter/Metadata",
+ "Rewrite tags in a FLAC file", "Christophe Fergeau <teuf@gnome.org>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_tag_sink_template));
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&flac_tag_src_template));
}
static void
static void
-gst_flac_tag_init (GstFlacTag * tag, GstFlacTagClass * klass)
+gst_flac_tag_init (GstFlacTag * tag)
{
/* create the sink and src pads */
tag->sinkpad =
{
GstFlacTag *tag;
GstFlowReturn ret;
+ guint8 *data;
+ gsize size;
ret = GST_FLOW_OK;
tag = GST_FLAC_TAG (gst_pad_get_parent (pad));
id_buffer = gst_adapter_take_buffer (tag->adapter, FLAC_MAGIC_SIZE);
GST_DEBUG_OBJECT (tag, "looking for " FLAC_MAGIC " identifier");
- if (memcmp (GST_BUFFER_DATA (id_buffer), FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) {
+ if (gst_buffer_memcmp (id_buffer, 0, FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) {
GST_DEBUG_OBJECT (tag, "pushing " FLAC_MAGIC " identifier buffer");
gst_buffer_set_caps (id_buffer, GST_PAD_CAPS (tag->srcpad));
* of a metadata block
*/
if (tag->state == GST_FLAC_TAG_STATE_METADATA_BLOCKS) {
- guint size;
guint type;
gboolean is_last;
const guint8 *block_header;
if (gst_adapter_available (tag->adapter) < 4)
goto cleanup;
- block_header = gst_adapter_peek (tag->adapter, 4);
+ block_header = gst_adapter_map (tag->adapter, 4);
is_last = ((block_header[0] & 0x80) == 0x80);
type = block_header[0] & 0x7F;
size = (block_header[1] << 16)
| (block_header[2] << 8)
| block_header[3];
+ gst_adapter_unmap (tag->adapter, 0);
/* The 4 bytes long header isn't included in the metadata size */
tag->metadata_block_size = size + 4;
/* clear the is-last flag, as the last metadata block will
* be the vorbis comment block which we will build ourselves.
*/
- GST_BUFFER_DATA (metadata_buffer)[0] &= (~0x80);
+ data = gst_buffer_map (metadata_buffer, &size, NULL, GST_MAP_READWRITE);
+ data[0] &= (~0x80);
+ gst_buffer_unmap (metadata_buffer, data, size);
if (tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) {
GST_DEBUG_OBJECT (tag, "pushing metadata block buffer");
* block, and stop now if the user only wants to read tags
*/
if (tag->vorbiscomment != NULL) {
+ guint8 id_data[4];
/* We found some tags, try to parse them and notify the other elements
* that we encountered some tags
*/
GST_DEBUG_OBJECT (tag, "emitting vorbiscomment tags");
+ gst_buffer_extract (tag->vorbiscomment, 0, id_data, 4);
tag->tags = gst_tag_list_from_vorbiscomment_buffer (tag->vorbiscomment,
- GST_BUFFER_DATA (tag->vorbiscomment), 4, NULL);
+ id_data, 4, NULL);
if (tag->tags != NULL) {
gst_element_found_tags (GST_ELEMENT (tag),
gst_tag_list_copy (tag->tags));
*/
if (tag->state == GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT) {
GstBuffer *buffer;
- gint size;
const GstTagList *user_tags;
GstTagList *merged_tags;
*/
GST_WARNING_OBJECT (tag, "No tags found");
buffer = gst_buffer_new_and_alloc (12);
- if (buffer == NULL) {
- GST_ELEMENT_ERROR (tag, CORE, TOO_LAZY, (NULL),
- ("Error creating 12-byte buffer for padding block"));
- ret = GST_FLOW_ERROR;
- goto cleanup;
- }
- memset (GST_BUFFER_DATA (buffer), 0, GST_BUFFER_SIZE (buffer));
- GST_BUFFER_DATA (buffer)[0] = 0x81; /* 0x80 = Last metadata block,
- * 0x01 = padding block
- */
+ if (buffer == NULL)
+ goto no_buffer;
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_WRITE);
+ memset (data, 0, size);
+ data[0] = 0x81; /* 0x80 = Last metadata block,
+ * 0x01 = padding block */
+ gst_buffer_unmap (buffer, data, size);
} else {
guchar header[4];
+ guint8 fbit[1];
memset (header, 0, sizeof (header));
header[0] = 0x84; /* 0x80 = Last metadata block,
- * 0x04 = vorbiscomment block
- */
+ * 0x04 = vorbiscomment block */
buffer = gst_tag_list_to_vorbiscomment_buffer (merged_tags, header,
sizeof (header), NULL);
GST_DEBUG_OBJECT (tag, "Writing tags %" GST_PTR_FORMAT, merged_tags);
gst_tag_list_free (merged_tags);
- if (buffer == NULL) {
- GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
- ("Error converting tag list to vorbiscomment buffer"));
- ret = GST_FLOW_ERROR;
- goto cleanup;
- }
- size = GST_BUFFER_SIZE (buffer) - 4;
- if ((size > 0xFFFFFF) || (size < 0)) {
- /* FLAC vorbis comment blocks are limited to 2^24 bytes,
- * while the vorbis specs allow more than that. Shouldn't
- * be a real world problem though
- */
- GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
- ("Vorbis comment of size %d too long", size));
- ret = GST_FLOW_ERROR;
- goto cleanup;
- }
+ if (buffer == NULL)
+ goto no_comment;
+
+ size = gst_buffer_get_size (buffer);
+ if ((size < 4) || ((size - 4) > 0xFFFFFF))
+ goto comment_too_long;
+ fbit[0] = 1;
/* Get rid of the framing bit at the end of the vorbiscomment buffer
* if it exists since libFLAC seems to lose sync because of this
* bit in gstflacdec
*/
- if (GST_BUFFER_DATA (buffer)[GST_BUFFER_SIZE (buffer) - 1] == 1) {
- GstBuffer *sub;
-
- sub = gst_buffer_create_sub (buffer, 0, GST_BUFFER_SIZE (buffer) - 1);
- gst_buffer_unref (buffer);
- buffer = sub;
+ if (gst_buffer_memcmp (buffer, size - 1, fbit, 1) == 0) {
+ buffer = gst_buffer_make_writable (buffer);
+ gst_buffer_resize (buffer, 0, size - 1);
}
}
/* The 4 byte metadata block header isn't accounted for in the total
* size of the metadata block
*/
- size = GST_BUFFER_SIZE (buffer) - 4;
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_WRITE);
+ data[1] = (((size - 4) & 0xFF0000) >> 16);
+ data[2] = (((size - 4) & 0x00FF00) >> 8);
+ data[3] = ((size - 4) & 0x0000FF);
+ gst_buffer_unmap (buffer, data, size);
+
+  GST_DEBUG_OBJECT (tag, "pushing %" G_GSIZE_FORMAT " byte vorbiscomment buffer",
+      size);
- GST_BUFFER_DATA (buffer)[1] = ((size & 0xFF0000) >> 16);
- GST_BUFFER_DATA (buffer)[2] = ((size & 0x00FF00) >> 8);
- GST_BUFFER_DATA (buffer)[3] = (size & 0x0000FF);
- GST_DEBUG_OBJECT (tag, "pushing %d byte vorbiscomment buffer",
- GST_BUFFER_SIZE (buffer));
gst_buffer_set_caps (buffer, GST_PAD_CAPS (tag->srcpad));
ret = gst_pad_push (tag->srcpad, buffer);
if (ret != GST_FLOW_OK) {
gst_object_unref (tag);
return ret;
-}
+ /* ERRORS */
+no_buffer:
+ {
+ GST_ELEMENT_ERROR (tag, CORE, TOO_LAZY, (NULL),
+ ("Error creating 12-byte buffer for padding block"));
+ ret = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+no_comment:
+ {
+ GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
+ ("Error converting tag list to vorbiscomment buffer"));
+ ret = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+comment_too_long:
+ {
+ /* FLAC vorbis comment blocks are limited to 2^24 bytes,
+ * while the vorbis specs allow more than that. Shouldn't
+ * be a real world problem though
+ */
+ GST_ELEMENT_ERROR (tag, CORE, TAG, (NULL),
+        ("Vorbis comment of size %" G_GSIZE_FORMAT " too long", size));
+ ret = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+}
static GstStateChangeReturn
gst_flac_tag_change_state (GstElement * element, GstStateChange transition)
break;
}
- return parent_class->change_state (element, transition);
+ return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
}
buffer_size, spec->segsize, spec->segtotal);
/* allocate the ringbuffer memory now */
- buf->data = gst_buffer_new_and_alloc (spec->segtotal * spec->segsize);
- memset (GST_BUFFER_DATA (buf->data), 0, GST_BUFFER_SIZE (buf->data));
+ buf->size = spec->segtotal * spec->segsize;
+ buf->memory = g_malloc0 (buf->size);
if ((res = gst_jack_audio_client_set_active (sink->client, TRUE)))
goto could_not_activate;
abuf->sample_rate = -1;
/* free the buffer */
- gst_buffer_unref (buf->data);
- buf->data = NULL;
+ g_free (buf->memory);
+ buf->memory = NULL;
return TRUE;
}
PROP_LAST
};
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_jack_audio_sink_debug, "jacksink", 0, "jacksink element");
-
-GST_BOILERPLATE_FULL (GstJackAudioSink, gst_jack_audio_sink, GstBaseAudioSink,
- GST_TYPE_BASE_AUDIO_SINK, _do_init);
+#define gst_jack_audio_sink_parent_class parent_class
+G_DEFINE_TYPE (GstJackAudioSink, gst_jack_audio_sink, GST_TYPE_BASE_AUDIO_SINK);
static void gst_jack_audio_sink_dispose (GObject * object);
static void gst_jack_audio_sink_set_property (GObject * object, guint prop_id,
sink);
static void
-gst_jack_audio_sink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Audio Sink (Jack)",
- "Sink/Audio", "Output audio to a JACK server",
- "Wim Taymans <wim.taymans@gmail.com>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&jackaudiosink_sink_factory));
-}
-
-static void
gst_jack_audio_sink_class_init (GstJackAudioSinkClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseSinkClass *gstbasesink_class;
GstBaseAudioSinkClass *gstbaseaudiosink_class;
+ GST_DEBUG_CATEGORY_INIT (gst_jack_audio_sink_debug, "jacksink", 0,
+ "jacksink element");
+
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasesink_class = (GstBaseSinkClass *) klass;
gstbaseaudiosink_class = (GstBaseAudioSinkClass *) klass;
GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Audio Sink (Jack)",
+ "Sink/Audio", "Output audio to a JACK server",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&jackaudiosink_sink_factory));
+
gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_jack_audio_sink_getcaps);
gstbaseaudiosink_class->create_ringbuffer =
}
static void
-gst_jack_audio_sink_init (GstJackAudioSink * sink,
- GstJackAudioSinkClass * g_class)
+gst_jack_audio_sink_init (GstJackAudioSink * sink)
{
sink->connect = DEFAULT_PROP_CONNECT;
sink->server = g_strdup (DEFAULT_PROP_SERVER);
buffer_size, spec->segsize, spec->segtotal);
/* allocate the ringbuffer memory now */
- buf->data = gst_buffer_new_and_alloc (spec->segtotal * spec->segsize);
- memset (GST_BUFFER_DATA (buf->data), 0, GST_BUFFER_SIZE (buf->data));
+ buf->size = spec->segtotal * spec->segsize;
+ buf->memory = g_malloc0 (buf->size);
if ((res = gst_jack_audio_client_set_active (src->client, TRUE)))
goto could_not_activate;
abuf->sample_rate = -1;
/* free the buffer */
- gst_buffer_unref (buf->data);
- buf->data = NULL;
+ g_free (buf->memory);
+ buf->memory = NULL;
return TRUE;
}
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
);
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT(gst_jack_audio_src_debug, "jacksrc", 0, "jacksrc element");
-
-GST_BOILERPLATE_FULL (GstJackAudioSrc, gst_jack_audio_src, GstBaseAudioSrc,
- GST_TYPE_BASE_AUDIO_SRC, _do_init);
+#define gst_jack_audio_src_parent_class parent_class
+G_DEFINE_TYPE (GstJackAudioSrc, gst_jack_audio_src, GST_TYPE_BASE_AUDIO_SRC);
static void gst_jack_audio_src_dispose (GObject * object);
static void gst_jack_audio_src_set_property (GObject * object, guint prop_id,
/* GObject vmethod implementations */
-static void
-gst_jack_audio_src_base_init (gpointer gclass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_set_details_simple (element_class, "Audio Source (Jack)",
- "Source/Audio", "Captures audio from a JACK server",
- "Tristan Matthews <tristan@sat.qc.ca>");
-}
-
/* initialize the jack_audio_src's class */
static void
gst_jack_audio_src_class_init (GstJackAudioSrcClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseSrcClass *gstbasesrc_class;
GstBaseAudioSrcClass *gstbaseaudiosrc_class;
- gobject_class = (GObjectClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_jack_audio_src_debug, "jacksrc", 0,
+ "jacksrc element");
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasesrc_class = (GstBaseSrcClass *) klass;
gstbaseaudiosrc_class = (GstBaseAudioSrcClass *) klass;
GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "Audio Source (Jack)",
+ "Source/Audio", "Captures audio from a JACK server",
+ "Tristan Matthews <tristan@sat.qc.ca>");
+
gstbasesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_jack_audio_src_getcaps);
gstbaseaudiosrc_class->create_ringbuffer =
GST_DEBUG_FUNCPTR (gst_jack_audio_src_create_ringbuffer);
* initialize instance structure
*/
static void
-gst_jack_audio_src_init (GstJackAudioSrc * src, GstJackAudioSrcClass * gclass)
+gst_jack_audio_src_init (GstJackAudioSrc * src)
{
//gst_base_src_set_live(GST_BASE_SRC (src), TRUE);
src->connect = DEFAULT_PROP_CONNECT;
GST_OBJECT_LOCK (dec);
dec->proportion = proportion;
if (G_LIKELY (ts != GST_CLOCK_TIME_NONE)) {
- if (G_UNLIKELY (diff > 0))
+ if (G_UNLIKELY (diff > dec->qos_duration))
dec->earliest_time = ts + 2 * diff + dec->qos_duration;
else
dec->earliest_time = ts + diff;
static GstStateChangeReturn gst_pulsemixer_change_state (GstElement * element,
GstStateChange transition);
-static void gst_pulsemixer_init_interfaces (GType type);
-
GST_IMPLEMENT_PULSEMIXER_CTRL_METHODS (GstPulseMixer, gst_pulsemixer);
GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseMixer, gst_pulsemixer);
-GST_BOILERPLATE_FULL (GstPulseMixer, gst_pulsemixer, GstElement,
- GST_TYPE_ELEMENT, gst_pulsemixer_init_interfaces);
static gboolean
gst_pulsemixer_interface_supported (GstImplementsInterface
klass->supported = gst_pulsemixer_interface_supported;
}
-static void
-gst_pulsemixer_init_interfaces (GType type)
-{
- static const GInterfaceInfo implements_iface_info = {
- (GInterfaceInitFunc) gst_pulsemixer_implements_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo mixer_iface_info = {
- (GInterfaceInitFunc) gst_pulsemixer_mixer_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo probe_iface_info = {
- (GInterfaceInitFunc) gst_pulsemixer_property_probe_interface_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (type, GST_TYPE_IMPLEMENTS_INTERFACE,
- &implements_iface_info);
- g_type_add_interface_static (type, GST_TYPE_MIXER, &mixer_iface_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &probe_iface_info);
-}
-
-static void
-gst_pulsemixer_base_init (gpointer g_class)
-{
- gst_element_class_set_details_simple (GST_ELEMENT_CLASS (g_class),
- "PulseAudio Mixer",
- "Generic/Audio",
- "Control sound input and output levels for PulseAudio",
- "Lennart Poettering");
-}
+#define gst_pulsemixer_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPulseMixer, gst_pulsemixer, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_IMPLEMENTS_INTERFACE,
+ gst_pulsemixer_implements_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_MIXER, gst_pulsemixer_mixer_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_PROPERTY_PROBE,
+ gst_pulsemixer_property_probe_interface_init));
static void
gst_pulsemixer_class_init (GstPulseMixerClass * g_class)
g_param_spec_string ("device-name", "Device name",
"Human-readable name of the sound device", NULL,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_details_simple (GST_ELEMENT_CLASS (g_class),
+ "PulseAudio Mixer",
+ "Generic/Audio",
+ "Control sound input and output levels for PulseAudio",
+ "Lennart Poettering");
}
static void
-gst_pulsemixer_init (GstPulseMixer * this, GstPulseMixerClass * g_class)
+gst_pulsemixer_init (GstPulseMixer * this)
{
this->mixer = NULL;
this->server = NULL;
/* EOS needs running clock */
if (GST_BASE_SINK_CAST (psink)->eos ||
- g_atomic_int_get (&GST_BASE_AUDIO_SINK (psink)->abidata.ABI.
- eos_rendering))
+ g_atomic_int_get (&GST_BASE_AUDIO_SINK (psink)->abidata.
+ ABI.eos_rendering))
gst_pulsering_set_corked (pbuf, FALSE, FALSE);
pa_threaded_mainloop_unlock (mainloop);
static GstStateChangeReturn gst_pulsesink_change_state (GstElement * element,
GstStateChange transition);
-static void gst_pulsesink_init_interfaces (GType type);
-
#if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
# define ENDIANNESS "LITTLE_ENDIAN, BIG_ENDIAN"
#else
# define ENDIANNESS "BIG_ENDIAN, LITTLE_ENDIAN"
#endif
+static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 16, "
+ "depth = (int) 16, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-float, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "width = (int) 32, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 32, "
+ "depth = (int) 32, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 32 ];"
+#ifdef HAVE_PULSE_0_9_15
+ "audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 24, "
+ "depth = (int) 24, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 32, "
+ "depth = (int) 24, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 32 ];"
+#endif
+ "audio/x-raw-int, "
+ "signed = (boolean) FALSE, "
+ "width = (int) 8, "
+ "depth = (int) 8, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-alaw, "
+ "rate = (int) [ 1, MAX], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-mulaw, "
+ "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ]")
+ );
+
GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseSink, gst_pulsesink);
-#define _do_init(type) \
-  gst_pulsesink_init_contexts (); \
-  gst_pulsesink_init_interfaces (type);
-GST_BOILERPLATE_FULL (GstPulseSink, gst_pulsesink, GstBaseAudioSink,
-    GST_TYPE_BASE_AUDIO_SINK, _do_init);
-
static gboolean
gst_pulsesink_interface_supported (GstImplementsInterface *
iface, GType interface_type)
klass->supported = gst_pulsesink_interface_supported;
}
-static void
-gst_pulsesink_init_interfaces (GType type)
-{
- static const GInterfaceInfo implements_iface_info = {
- (GInterfaceInitFunc) gst_pulsesink_implements_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo probe_iface_info = {
- (GInterfaceInitFunc) gst_pulsesink_property_probe_interface_init,
- NULL,
- NULL,
- };
+#define gst_pulsesink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPulseSink, gst_pulsesink, GST_TYPE_BASE_AUDIO_SINK,
+ gst_pulsesink_init_contexts ();
+ G_IMPLEMENT_INTERFACE (GST_TYPE_IMPLEMENTS_INTERFACE,
+ gst_pulsesink_implements_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_PROPERTY_PROBE,
+ gst_pulsesink_property_probe_interface_init);
#ifdef HAVE_PULSE_0_9_12
- static const GInterfaceInfo svol_iface_info = {
- NULL, NULL, NULL
- };
-
- g_type_add_interface_static (type, GST_TYPE_STREAM_VOLUME, &svol_iface_info);
-#endif
-
- g_type_add_interface_static (type, GST_TYPE_IMPLEMENTS_INTERFACE,
- &implements_iface_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &probe_iface_info);
-}
-
-static void
-gst_pulsesink_base_init (gpointer g_class)
-{
- static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-float, "
- "endianness = (int) { " ENDIANNESS " }, "
- "width = (int) 32, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 32, "
- "depth = (int) 32, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 32 ];"
-#ifdef HAVE_PULSE_0_9_15
- "audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 24, "
- "depth = (int) 24, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 32, "
- "depth = (int) 24, "
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 32 ];"
+ G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL)
#endif
- "audio/x-raw-int, "
- "signed = (boolean) FALSE, "
- "width = (int) 8, "
- "depth = (int) 8, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-alaw, "
- "rate = (int) [ 1, MAX], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-mulaw, "
- "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ]")
- );
-
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "PulseAudio Audio Sink",
- "Sink/Audio", "Plays audio to a PulseAudio server", "Lennart Poettering");
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&pad_template));
-}
+ );
static GstRingBuffer *
gst_pulsesink_create_ringbuffer (GstBaseAudioSink * sink)
g_param_spec_boxed ("stream-properties", "stream properties",
"list of pulseaudio stream properties",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "PulseAudio Audio Sink",
+ "Sink/Audio", "Plays audio to a PulseAudio server", "Lennart Poettering");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&pad_template));
}
/* returns the current time of the sink ringbuffer */
}
static void
-gst_pulsesink_init (GstPulseSink * pulsesink, GstPulseSinkClass * klass)
+gst_pulsesink_init (GstPulseSink * pulsesink)
{
pulsesink->server = NULL;
pulsesink->device = NULL;
static GstStateChangeReturn gst_pulsesrc_change_state (GstElement *
element, GstStateChange transition);
-static void gst_pulsesrc_init_interfaces (GType type);
-
#if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
# define ENDIANNESS "LITTLE_ENDIAN, BIG_ENDIAN"
#else
# define ENDIANNESS "BIG_ENDIAN, LITTLE_ENDIAN"
#endif
+static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 16, "
+ "depth = (int) 16, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-float, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "width = (int) 32, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-int, "
+ "endianness = (int) { " ENDIANNESS " }, "
+ "signed = (boolean) TRUE, "
+ "width = (int) 32, "
+ "depth = (int) 32, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-raw-int, "
+ "signed = (boolean) FALSE, "
+ "width = (int) 8, "
+ "depth = (int) 8, "
+ "rate = (int) [ 1, MAX ], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-alaw, "
+ "rate = (int) [ 1, MAX], "
+ "channels = (int) [ 1, 32 ];"
+ "audio/x-mulaw, "
+ "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ]")
+ );
+
+
GST_IMPLEMENT_PULSEMIXER_CTRL_METHODS (GstPulseSrc, gst_pulsesrc);
GST_IMPLEMENT_PULSEPROBE_METHODS (GstPulseSrc, gst_pulsesrc);
-GST_BOILERPLATE_FULL (GstPulseSrc, gst_pulsesrc, GstAudioSrc,
- GST_TYPE_AUDIO_SRC, gst_pulsesrc_init_interfaces);
static gboolean
gst_pulsesrc_interface_supported (GstImplementsInterface *
klass->supported = gst_pulsesrc_interface_supported;
}
-static void
-gst_pulsesrc_init_interfaces (GType type)
-{
- static const GInterfaceInfo implements_iface_info = {
- (GInterfaceInitFunc) gst_pulsesrc_implements_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo mixer_iface_info = {
- (GInterfaceInitFunc) gst_pulsesrc_mixer_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo probe_iface_info = {
- (GInterfaceInitFunc) gst_pulsesrc_property_probe_interface_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (type, GST_TYPE_IMPLEMENTS_INTERFACE,
- &implements_iface_info);
- g_type_add_interface_static (type, GST_TYPE_MIXER, &mixer_iface_info);
- g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
- &probe_iface_info);
-}
-
-static void
-gst_pulsesrc_base_init (gpointer g_class)
-{
-
- static GstStaticPadTemplate pad_template = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-float, "
- "endianness = (int) { " ENDIANNESS " }, "
- "width = (int) 32, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "endianness = (int) { " ENDIANNESS " }, "
- "signed = (boolean) TRUE, "
- "width = (int) 32, "
- "depth = (int) 32, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-raw-int, "
- "signed = (boolean) FALSE, "
- "width = (int) 8, "
- "depth = (int) 8, "
- "rate = (int) [ 1, MAX ], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-alaw, "
- "rate = (int) [ 1, MAX], "
- "channels = (int) [ 1, 32 ];"
- "audio/x-mulaw, "
- "rate = (int) [ 1, MAX], " "channels = (int) [ 1, 32 ]")
- );
-
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "PulseAudio Audio Source",
- "Source/Audio",
- "Captures audio from a PulseAudio server", "Lennart Poettering");
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&pad_template));
-}
+#define gst_pulsesrc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPulseSrc, gst_pulsesrc, GST_TYPE_AUDIO_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_IMPLEMENTS_INTERFACE,
+ gst_pulsesrc_implements_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_MIXER, gst_pulsesrc_mixer_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_PROPERTY_PROBE,
+ gst_pulsesrc_property_probe_interface_init));
static void
gst_pulsesrc_class_init (GstPulseSrcClass * klass)
g_param_spec_boxed ("stream-properties", "stream properties",
"list of pulseaudio stream properties",
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "PulseAudio Audio Source",
+ "Source/Audio",
+ "Captures audio from a PulseAudio server", "Lennart Poettering");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&pad_template));
}
static void
-gst_pulsesrc_init (GstPulseSrc * pulsesrc, GstPulseSrcClass * klass)
+gst_pulsesrc_init (GstPulseSrc * pulsesrc)
{
pulsesrc->server = NULL;
pulsesrc->device = NULL;
gboolean result = FALSE;
/* first see what is possible on our source pad */
- thiscaps = gst_pad_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
+ thiscaps = gst_pad_get_caps (GST_BASE_SRC_PAD (basesrc), NULL);
GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
/* nothing or anything is allowed, we're done */
if (thiscaps == NULL || gst_caps_is_any (thiscaps))
goto no_nego_needed;
/* get the peer caps */
- peercaps = gst_pad_peer_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
+ peercaps = gst_pad_peer_get_caps (GST_BASE_SRC_PAD (basesrc), NULL);
GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
if (peercaps) {
/* get intersection */
{
GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (userdata);
- pulsesrc->operation_success = ! !success;
+ pulsesrc->operation_success = !!success;
pa_threaded_mainloop_signal (pulsesrc->mainloop, 0);
}
GstBuffer *buf;
val = gst_tag_list_get_value_index (list, tag, i);
- buf = (GstBuffer *) gst_value_get_mini_object (val);
+ buf = (GstBuffer *) g_value_get_boxed (val);
if (buf && GST_BUFFER_CAPS (buf)) {
GstStructure *s;
GST_DEBUG ("image %u/%u", n + 1, num_tags);
val = gst_tag_list_get_value_index (list, tag, n);
- image = (GstBuffer *) gst_value_get_mini_object (val);
+ image = (GstBuffer *) g_value_get_boxed (val);
if (GST_IS_BUFFER (image) && GST_BUFFER_SIZE (image) > 0 &&
GST_BUFFER_CAPS (image) != NULL &&
static gboolean gst_alpha_start (GstBaseTransform * trans);
static gboolean gst_alpha_get_unit_size (GstBaseTransform * btrans,
- GstCaps * caps, guint * size);
+ GstCaps * caps, gsize * size);
static GstCaps *gst_alpha_transform_caps (GstBaseTransform * btrans,
GstPadDirection direction, GstCaps * caps);
static gboolean gst_alpha_set_caps (GstBaseTransform * btrans,
GValue * value, GParamSpec * pspec);
static void gst_alpha_finalize (GObject * object);
-GST_BOILERPLATE (GstAlpha, gst_alpha, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
+#define gst_alpha_parent_class parent_class
+G_DEFINE_TYPE (GstAlpha, gst_alpha, GST_TYPE_VIDEO_FILTER);
#define GST_TYPE_ALPHA_METHOD (gst_alpha_method_get_type())
static GType
}
static void
-gst_alpha_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Alpha filter",
- "Filter/Effect/Video",
- "Adds an alpha channel to video - uniform or via chroma-keying",
- "Wim Taymans <wim@fluendo.com>\n"
- "Edward Hervey <edward.hervey@collabora.co.uk>\n"
- "Jan Schmidt <thaytan@noraisin.net>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_alpha_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_alpha_src_template));
-
- GST_DEBUG_CATEGORY_INIT (gst_alpha_debug, "alpha", 0,
- "alpha - Element for adding alpha channel to streams");
-}
-
-static void
gst_alpha_class_init (GstAlphaClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *btrans_class = (GstBaseTransformClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_alpha_debug, "alpha", 0,
+ "alpha - Element for adding alpha channel to streams");
+
gobject_class->set_property = gst_alpha_set_property;
gobject_class->get_property = gst_alpha_get_property;
gobject_class->finalize = gst_alpha_finalize;
DEFAULT_PREFER_PASSTHROUGH,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Alpha filter",
+ "Filter/Effect/Video",
+ "Adds an alpha channel to video - uniform or via chroma-keying",
+ "Wim Taymans <wim.taymans@gmail.com>\n"
+ "Edward Hervey <edward.hervey@collabora.co.uk>\n"
+ "Jan Schmidt <thaytan@noraisin.net>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_alpha_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_alpha_src_template));
+
btrans_class->start = GST_DEBUG_FUNCPTR (gst_alpha_start);
btrans_class->transform = GST_DEBUG_FUNCPTR (gst_alpha_transform);
btrans_class->before_transform =
}
static void
-gst_alpha_init (GstAlpha * alpha, GstAlphaClass * klass)
+gst_alpha_init (GstAlpha * alpha)
{
alpha->alpha = DEFAULT_ALPHA;
alpha->method = DEFAULT_METHOD;
case PROP_PREFER_PASSTHROUGH:{
gboolean prefer_passthrough = g_value_get_boolean (value);
- reconfigure = ((! !prefer_passthrough) != (! !alpha->prefer_passthrough))
+ reconfigure = ((!!prefer_passthrough) != (!!alpha->prefer_passthrough))
&& (alpha->method == ALPHA_METHOD_SET) && (alpha->alpha == 1.0);
alpha->prefer_passthrough = prefer_passthrough;
break;
static gboolean
gst_alpha_get_unit_size (GstBaseTransform * btrans,
- GstCaps * caps, guint * size)
+ GstCaps * caps, gsize * size)
{
GstVideoFormat format;
gint width, height;
{
GstAlpha *alpha = GST_ALPHA (btrans);
gint width, height;
+ guint8 *indata, *outdata;
+ gsize insize, outsize;
GST_ALPHA_LOCK (alpha);
width = alpha->width;
height = alpha->height;
- alpha->process (GST_BUFFER_DATA (in),
- GST_BUFFER_DATA (out), width, height, alpha);
+ indata = gst_buffer_map (in, &insize, NULL, GST_MAP_READ);
+ outdata = gst_buffer_map (out, &outsize, NULL, GST_MAP_WRITE);
+
+ alpha->process (indata, outdata, width, height, alpha);
+
+ gst_buffer_unmap (out, outdata, outsize);
+ gst_buffer_unmap (in, indata, insize);
GST_ALPHA_UNLOCK (alpha);
GST_VIDEO_CAPS_YUV ("AYUV"))
);
-GST_BOILERPLATE (GstAlphaColor, gst_alpha_color, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+G_DEFINE_TYPE (GstAlphaColor, gst_alpha_color, GST_TYPE_VIDEO_FILTER);
static GstCaps *gst_alpha_color_transform_caps (GstBaseTransform * btrans,
GstPadDirection direction, GstCaps * caps);
GstBuffer * inbuf);
static void
-gst_alpha_color_base_init (gpointer g_class)
+gst_alpha_color_class_init (GstAlphaColorClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *gstbasetransform_class =
+ (GstBaseTransformClass *) klass;
- gst_element_class_set_details_simple (element_class, "Alpha color filter",
+ GST_DEBUG_CATEGORY_INIT (alpha_color_debug, "alphacolor", 0,
+ "ARGB<->AYUV colorspace conversion preserving the alpha channels");
+
+ gst_element_class_set_details_simple (gstelement_class, "Alpha color filter",
"Filter/Converter/Video",
"ARGB from/to AYUV colorspace conversion preserving the alpha channel",
"Wim Taymans <wim@fluendo.com>");
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&src_template));
-}
-
-static void
-gst_alpha_color_class_init (GstAlphaColorClass * klass)
-{
- GstBaseTransformClass *gstbasetransform_class =
- (GstBaseTransformClass *) klass;
gstbasetransform_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_alpha_color_transform_caps);
gstbasetransform_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_alpha_color_transform_ip);
- GST_DEBUG_CATEGORY_INIT (alpha_color_debug, "alphacolor", 0,
- "ARGB<->AYUV colorspace conversion preserving the alpha channels");
}
static void
-gst_alpha_color_init (GstAlphaColor * alpha, GstAlphaColorClass * g_class)
+gst_alpha_color_init (GstAlphaColor * alpha)
{
GstBaseTransform *btrans = GST_BASE_TRANSFORM (alpha);
gst_alpha_color_transform_ip (GstBaseTransform * btrans, GstBuffer * inbuf)
{
GstAlphaColor *alpha = GST_ALPHA_COLOR (btrans);
-
- if (G_UNLIKELY (GST_BUFFER_SIZE (inbuf) != 4 * alpha->width * alpha->height)) {
- GST_ERROR_OBJECT (alpha, "Invalid buffer size (was %u, expected %u)",
- GST_BUFFER_SIZE (inbuf), alpha->width * alpha->height);
- return GST_FLOW_ERROR;
- }
+ guint8 *data;
+ gsize size;
if (gst_base_transform_is_passthrough (btrans))
return GST_FLOW_OK;
return GST_FLOW_NOT_NEGOTIATED;
}
+ data = gst_buffer_map (inbuf, &size, NULL, GST_MAP_READWRITE);
+
+  if (G_UNLIKELY (size != 4 * alpha->width * alpha->height)) {
+    GST_ERROR_OBJECT (alpha,
+        "Invalid buffer size (was %" G_GSIZE_FORMAT ", expected %u)",
+        size, 4 * alpha->width * alpha->height);
+ gst_buffer_unmap (inbuf, data, size);
+ return GST_FLOW_ERROR;
+ }
+
/* Transform in place */
- alpha->process (GST_BUFFER_DATA (inbuf), GST_BUFFER_SIZE (inbuf),
- alpha->matrix);
+ alpha->process (data, size, alpha->matrix);
+
+ gst_buffer_unmap (inbuf, data, size);
return GST_FLOW_OK;
}
GstBuffer * buffer, gboolean start_tag, guint * tag_size,
GstTagList ** tags);
-GST_BOILERPLATE (GstApeDemux, gst_ape_demux, GstTagDemux, GST_TYPE_TAG_DEMUX);
+G_DEFINE_TYPE (GstApeDemux, gst_ape_demux, GST_TYPE_TAG_DEMUX);
static void
-gst_ape_demux_base_init (gpointer klass)
+gst_ape_demux_class_init (GstApeDemuxClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *element_class;
+ GstTagDemuxClass *tagdemux_class;
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_factory));
+ GST_DEBUG_CATEGORY_INIT (apedemux_debug, "apedemux", 0,
+ "GStreamer APE tag demuxer");
+
+ tagdemux_class = GST_TAG_DEMUX_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_set_details_simple (element_class, "APE tag demuxer",
"Codec/Demuxer/Metadata",
"Read and output APE tags while demuxing the contents",
"Tim-Philipp Müller <tim centricular net>");
- GST_DEBUG_CATEGORY_INIT (apedemux_debug, "apedemux", 0,
- "GStreamer APE tag demuxer");
-}
-
-static void
-gst_ape_demux_class_init (GstApeDemuxClass * klass)
-{
- GstTagDemuxClass *tagdemux_class;
-
- tagdemux_class = GST_TAG_DEMUX_CLASS (klass);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
tagdemux_class->identify_tag = GST_DEBUG_FUNCPTR (gst_ape_demux_identify_tag);
tagdemux_class->parse_tag = GST_DEBUG_FUNCPTR (gst_ape_demux_parse_tag);
}
static void
-gst_ape_demux_init (GstApeDemux * apedemux, GstApeDemuxClass * gclass)
+gst_ape_demux_init (GstApeDemux * apedemux)
{
/* nothing to do here */
}
gst_ape_demux_identify_tag (GstTagDemux * demux, GstBuffer * buffer,
gboolean start_tag, guint * tag_size)
{
- if (memcmp (GST_BUFFER_DATA (buffer), "APETAGEX", 8) != 0) {
+ guint8 *data;
+ gsize size;
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
+ if (memcmp (data, "APETAGEX", 8) != 0) {
GST_DEBUG_OBJECT (demux, "No APETAGEX marker at %s - not an APE file",
(start_tag) ? "start" : "end");
+ gst_buffer_unmap (buffer, data, size);
return FALSE;
}
- *tag_size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buffer) + 12);
+ *tag_size = GST_READ_UINT32_LE (data + 12);
/* size is without header, so add 32 to account for that */
*tag_size += 32;
+ gst_buffer_unmap (buffer, data, size);
+
return TRUE;
}
gst_ape_demux_parse_tag (GstTagDemux * demux, GstBuffer * buffer,
gboolean start_tag, guint * tag_size, GstTagList ** tags)
{
- const guint8 *data;
- const guint8 *footer;
+ guint8 *data_start, *data;
+ guint8 *footer;
gboolean have_header;
gboolean end_tag = !start_tag;
GstCaps *sink_caps;
guint version, footer_size;
+ gsize size;
+
+ data_start = data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
- GST_LOG_OBJECT (demux, "Parsing buffer of size %u", GST_BUFFER_SIZE (buffer));
+ GST_LOG_OBJECT (demux, "Parsing buffer of size %" G_GSIZE_FORMAT, size);
- data = GST_BUFFER_DATA (buffer);
- footer = GST_BUFFER_DATA (buffer) + GST_BUFFER_SIZE (buffer) - 32;
+ footer = data + size - 32;
GST_LOG_OBJECT (demux, "Checking for footer at offset 0x%04x",
(guint) (footer - data));
GST_TAG_CONTAINER_FORMAT, sink_caps);
gst_caps_unref (sink_caps);
+ gst_buffer_unmap (buffer, data_start, size);
+
return GST_TAG_DEMUX_RESULT_OK;
}
" rate=(int)[1,MAX]," \
" channels=(int)[1,MAX]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_amplify_debug, "audioamplify", 0, "audioamplify element");
-
-GST_BOILERPLATE_FULL (GstAudioAmplify, gst_audio_amplify, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioAmplify, gst_audio_amplify, GST_TYPE_AUDIO_FILTER);
static gboolean gst_audio_amplify_set_process_function (GstAudioAmplify *
filter, gint clipping, gint format, gint width);
/* GObject vmethod implementations */
static void
-gst_audio_amplify_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Audio amplifier",
- "Filter/Effect/Audio",
- "Amplifies an audio stream by a given factor",
- "Sebastian Dröge <slomo@circular-chaos.org>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_amplify_class_init (GstAudioAmplifyClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_amplify_debug, "audioamplify", 0,
+ "audioamplify element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_amplify_set_property;
gobject_class->get_property = gst_audio_amplify_get_property;
GST_TYPE_AUDIO_AMPLIFY_CLIPPING_METHOD, METHOD_CLIP,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Audio amplifier",
+ "Filter/Effect/Audio",
+ "Amplifies an audio stream by a given factor",
+ "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_amplify_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_amplify_init (GstAudioAmplify * filter, GstAudioAmplifyClass * klass)
+gst_audio_amplify_init (GstAudioAmplify * filter)
{
filter->amplification = 1.0;
gst_audio_amplify_set_process_function (filter, METHOD_CLIP,
GstAudioAmplify *filter = GST_AUDIO_AMPLIFY (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
-
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (filter)->format.width / 8);
+
+ filter->process (filter, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
PROP_POLES
};
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_band_debug, "audiochebband", 0, "audiochebband element");
-
-GST_BOILERPLATE_FULL (GstAudioChebBand, gst_audio_cheb_band,
- GstAudioFXBaseIIRFilter, GST_TYPE_AUDIO_FX_BASE_IIR_FILTER, DEBUG_INIT);
+#define gst_audio_cheb_band_parent_class parent_class
+G_DEFINE_TYPE (GstAudioChebBand, gst_audio_cheb_band,
+ GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
static void gst_audio_cheb_band_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_cheb_band_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (element_class,
- "Band pass & band reject filter", "Filter/Effect/Audio",
- "Chebyshev band pass and band reject filter",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
-static void
gst_audio_cheb_band_class_init (GstAudioChebBandClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_band_debug, "audiochebband", 0,
+ "audiochebband element");
+
gobject_class->set_property = gst_audio_cheb_band_set_property;
gobject_class->get_property = gst_audio_cheb_band_get_property;
gobject_class->finalize = gst_audio_cheb_band_finalize;
4, 32, 4,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Band pass & band reject filter", "Filter/Effect/Audio",
+ "Chebyshev band pass and band reject filter",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_cheb_band_setup);
}
static void
-gst_audio_cheb_band_init (GstAudioChebBand * filter,
- GstAudioChebBandClass * klass)
+gst_audio_cheb_band_init (GstAudioChebBand * filter)
{
filter->lower_frequency = filter->upper_frequency = 0.0;
filter->mode = MODE_BAND_PASS;
PROP_POLES
};
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_limit_debug, "audiocheblimit", 0, "audiocheblimit element");
-
-GST_BOILERPLATE_FULL (GstAudioChebLimit,
- gst_audio_cheb_limit, GstAudioFXBaseIIRFilter,
- GST_TYPE_AUDIO_FX_BASE_IIR_FILTER, DEBUG_INIT);
+#define gst_audio_cheb_limit_parent_class parent_class
+G_DEFINE_TYPE (GstAudioChebLimit,
+ gst_audio_cheb_limit, GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
static void gst_audio_cheb_limit_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_cheb_limit_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_set_details_simple (element_class,
- "Low pass & high pass filter",
- "Filter/Effect/Audio",
- "Chebyshev low pass and high pass filter",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
-static void
gst_audio_cheb_limit_class_init (GstAudioChebLimitClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_limit_debug, "audiocheblimit", 0,
+ "audiocheblimit element");
+
gobject_class->set_property = gst_audio_cheb_limit_set_property;
gobject_class->get_property = gst_audio_cheb_limit_get_property;
gobject_class->finalize = gst_audio_cheb_limit_finalize;
2, 32, 4,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Low pass & high pass filter",
+ "Filter/Effect/Audio",
+ "Chebyshev low pass and high pass filter",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_cheb_limit_setup);
}
static void
-gst_audio_cheb_limit_init (GstAudioChebLimit * filter,
- GstAudioChebLimitClass * klass)
+gst_audio_cheb_limit_init (GstAudioChebLimit * filter)
{
filter->cutoff = 0.0;
filter->mode = MODE_LOW_PASS;
" rate=(int)[1,MAX]," \
" channels=(int)[1,MAX]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_dynamic_debug, "audiodynamic", 0, "audiodynamic element");
-
-GST_BOILERPLATE_FULL (GstAudioDynamic, gst_audio_dynamic, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioDynamic, gst_audio_dynamic, GST_TYPE_AUDIO_FILTER);
static void gst_audio_dynamic_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_dynamic_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class,
- "Dynamic range controller", "Filter/Effect/Audio",
- "Compressor and Expander", "Sebastian Dröge <slomo@circular-chaos.org>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_dynamic_class_init (GstAudioDynamicClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_dynamic_debug, "audiodynamic", 0,
+ "audiodynamic element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_dynamic_set_property;
gobject_class->get_property = gst_audio_dynamic_get_property;
1.0,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Dynamic range controller", "Filter/Effect/Audio",
+ "Compressor and Expander", "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_dynamic_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_dynamic_init (GstAudioDynamic * filter, GstAudioDynamicClass * klass)
+gst_audio_dynamic_init (GstAudioDynamic * filter)
{
filter->ratio = 1.0;
filter->threshold = 0.0;
GstAudioDynamic *filter = GST_AUDIO_DYNAMIC (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
-
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (filter)->format.width / 8);
+
+ filter->process (filter, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
" rate=(int)[1,MAX]," \
" channels=(int)[1,MAX]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_echo_debug, "audioecho", 0, "audioecho element");
-
-GST_BOILERPLATE_FULL (GstAudioEcho, gst_audio_echo, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+#define gst_audio_echo_parent_class parent_class
+G_DEFINE_TYPE (GstAudioEcho, gst_audio_echo, GST_TYPE_AUDIO_FILTER);
static void gst_audio_echo_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_echo_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Audio echo",
- "Filter/Effect/Audio",
- "Adds an echo or reverb effect to an audio stream",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_echo_class_init (GstAudioEchoClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *basetransform_class = (GstBaseTransformClass *) klass;
GstAudioFilterClass *audioself_class = (GstAudioFilterClass *) klass;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_echo_debug, "audioecho", 0,
+ "audioecho element");
gobject_class->set_property = gst_audio_echo_set_property;
gobject_class->get_property = gst_audio_echo_get_property;
0.0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS
| GST_PARAM_CONTROLLABLE));
+ gst_element_class_set_details_simple (gstelement_class, "Audio echo",
+ "Filter/Effect/Audio",
+ "Adds an echo or reverb effect to an audio stream",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
audioself_class->setup = GST_DEBUG_FUNCPTR (gst_audio_echo_setup);
basetransform_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_audio_echo_transform_ip);
}
static void
-gst_audio_echo_init (GstAudioEcho * self, GstAudioEchoClass * klass)
+gst_audio_echo_init (GstAudioEcho * self)
{
self->delay = 1;
self->max_delay = 1;
GstAudioEcho *self = GST_AUDIO_ECHO (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (self), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (self)->format.width / 8);
-
if (self->buffer == NULL) {
guint width, rate, channels;
}
}
- self->process (self, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (self)->format.width / 8);
+
+ self->process (self, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
static guint gst_audio_fir_filter_signals[LAST_SIGNAL] = { 0, };
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_fir_filter_debug, "audiofirfilter", 0, \
- "Generic audio FIR filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioFIRFilter, gst_audio_fir_filter, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_FIR_FILTER, DEBUG_INIT);
+#define gst_audio_fir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFIRFilter, gst_audio_fir_filter,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
static void gst_audio_fir_filter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static gboolean gst_audio_fir_filter_setup (GstAudioFilter * base,
GstRingBufferSpec * format);
-/* Element class */
-static void
-gst_audio_fir_filter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Audio FIR filter", "Filter/Effect/Audio",
- "Generic audio FIR filter with custom filter kernel",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
static void
gst_audio_fir_filter_class_init (GstAudioFIRFilterClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_fir_filter_debug, "audiofirfilter", 0,
+ "Generic audio FIR filter plugin");
+
gobject_class->set_property = gst_audio_fir_filter_set_property;
gobject_class->get_property = gst_audio_fir_filter_get_property;
gobject_class->finalize = gst_audio_fir_filter_finalize;
g_signal_new ("rate-changed", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstAudioFIRFilterClass, rate_changed),
NULL, NULL, gst_marshal_VOID__INT, G_TYPE_NONE, 1, G_TYPE_INT);
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "Audio FIR filter", "Filter/Effect/Audio",
+ "Generic audio FIR filter with custom filter kernel",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
static void
}
static void
-gst_audio_fir_filter_init (GstAudioFIRFilter * self,
- GstAudioFIRFilterClass * g_class)
+gst_audio_fir_filter_init (GstAudioFIRFilter * self)
{
GValue v = { 0, };
GValueArray *va;
" rate = (int) [ 1, MAX ], " \
" channels = (int) [ 1, MAX ]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_fir_filter_debug, "audiofxbasefirfilter", 0, \
- "FIR filter base class");
-
/* Switch from time-domain to FFT convolution for kernels >= this */
#define FFT_THRESHOLD 32
#define DEFAULT_LOW_LATENCY FALSE
#define DEFAULT_DRAIN_ON_CHANGES TRUE
-GST_BOILERPLATE_FULL (GstAudioFXBaseFIRFilter, gst_audio_fx_base_fir_filter,
- GstAudioFilter, GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+#define gst_audio_fx_base_fir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFXBaseFIRFilter, gst_audio_fx_base_fir_filter,
+ GST_TYPE_AUDIO_FILTER);
static GstFlowReturn gst_audio_fx_base_fir_filter_transform (GstBaseTransform *
base, GstBuffer * inbuf, GstBuffer * outbuf);
static gboolean gst_audio_fx_base_fir_filter_event (GstBaseTransform * base,
GstEvent * event);
static gboolean gst_audio_fx_base_fir_filter_transform_size (GstBaseTransform *
- base, GstPadDirection direction, GstCaps * caps, guint size,
- GstCaps * othercaps, guint * othersize);
+ base, GstPadDirection direction, GstCaps * caps, gsize size,
+ GstCaps * othercaps, gsize * othersize);
static gboolean gst_audio_fx_base_fir_filter_setup (GstAudioFilter * base,
GstRingBufferSpec * format);
}
static void
-gst_audio_fx_base_fir_filter_base_init (gpointer g_class)
-{
- GstCaps *caps;
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (g_class),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_fx_base_fir_filter_class_init (GstAudioFXBaseFIRFilterClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_fir_filter_debug,
+ "audiofxbasefirfilter", 0, "FIR filter base class");
gobject_class->dispose = gst_audio_fx_base_fir_filter_dispose;
gobject_class->set_property = gst_audio_fx_base_fir_filter_set_property;
DEFAULT_DRAIN_ON_CHANGES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
trans_class->transform =
GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_transform);
trans_class->start = GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_start);
}
static void
-gst_audio_fx_base_fir_filter_init (GstAudioFXBaseFIRFilter * self,
- GstAudioFXBaseFIRFilterClass * g_class)
+gst_audio_fx_base_fir_filter_init (GstAudioFXBaseFIRFilter * self)
{
self->kernel = NULL;
self->buffer = NULL;
gint channels = GST_AUDIO_FILTER_CAST (self)->format.channels;
gint width = GST_AUDIO_FILTER_CAST (self)->format.width / 8;
gint outsize, outsamples;
- guint8 *in, *out;
+ guint8 *in, *out, *data;
+ gsize size;
if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
self->buffer_fill = 0;
g_free (out);
}
- res = gst_pad_alloc_buffer (GST_BASE_TRANSFORM_CAST (self)->srcpad,
- GST_BUFFER_OFFSET_NONE, outsize,
- GST_PAD_CAPS (GST_BASE_TRANSFORM_CAST (self)->srcpad), &outbuf);
-
- if (G_UNLIKELY (res != GST_FLOW_OK)) {
- GST_WARNING_OBJECT (self, "failed allocating buffer of %d bytes",
- outsize);
- self->buffer_fill = 0;
- return;
- }
+ outbuf = gst_buffer_new_and_alloc (outsize);
+ gst_buffer_set_caps (outbuf,
+ GST_PAD_CAPS (GST_BASE_TRANSFORM_CAST (self)->srcpad));
/* Convolve the residue with zeros to get the actual remaining data */
in = g_new0 (guint8, outsize);
- self->nsamples_out +=
- self->process (self, in, GST_BUFFER_DATA (outbuf), outsamples);
+ data = gst_buffer_map (outbuf, &size, NULL, GST_MAP_READWRITE);
+ self->nsamples_out += self->process (self, in, data, outsamples);
+ gst_buffer_unmap (outbuf, data, size);
+
g_free (in);
} else {
guint gensamples = 0;
- guint8 *data;
outbuf = gst_buffer_new_and_alloc (outsize);
- data = GST_BUFFER_DATA (outbuf);
+ data = gst_buffer_map (outbuf, &size, NULL, GST_MAP_READWRITE);
while (gensamples < outsamples) {
guint step_insamples = self->block_length - self->buffer_fill;
g_free (out);
}
self->nsamples_out += gensamples;
+
+ gst_buffer_unmap (outbuf, data, size);
}
/* Set timestamp, offset, etc from the values we
GST_DEBUG_OBJECT (self, "Pushing residue buffer of size %d with timestamp: %"
GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
- GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
GST_BUFFER_OFFSET_END (outbuf), outsamples);
static gboolean
gst_audio_fx_base_fir_filter_transform_size (GstBaseTransform * base,
- GstPadDirection direction, GstCaps * caps, guint size, GstCaps * othercaps,
- guint * othersize)
+ GstPadDirection direction, GstCaps * caps, gsize size, GstCaps * othercaps,
+ gsize * othersize)
{
GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
guint blocklen;
gint channels = GST_AUDIO_FILTER_CAST (self)->format.channels;
gint rate = GST_AUDIO_FILTER_CAST (self)->format.rate;
gint width = GST_AUDIO_FILTER_CAST (self)->format.width / 8;
- guint input_samples = (GST_BUFFER_SIZE (inbuf) / width) / channels;
- guint output_samples = (GST_BUFFER_SIZE (outbuf) / width) / channels;
+ guint8 *indata, *outdata;
+ gsize insize, outsize;
+ guint input_samples;
+ guint output_samples;
guint generated_samples;
guint64 output_offset;
gint64 diff = 0;
self->start_off = GST_BUFFER_OFFSET (inbuf);
}
+ indata = gst_buffer_map (inbuf, &insize, NULL, GST_MAP_READ);
+ outdata = gst_buffer_map (outbuf, &outsize, NULL, GST_MAP_WRITE);
+
+ input_samples = (insize / width) / channels;
+ output_samples = (outsize / width) / channels;
+
self->nsamples_in += input_samples;
- generated_samples =
- self->process (self, GST_BUFFER_DATA (inbuf), GST_BUFFER_DATA (outbuf),
- input_samples);
+ generated_samples = self->process (self, indata, outdata, input_samples);
+
+ gst_buffer_unmap (inbuf, indata, insize);
+ gst_buffer_unmap (outbuf, outdata, outsize);
g_assert (generated_samples <= output_samples);
self->nsamples_out += generated_samples;
gint64 tmp = diff;
diff = generated_samples - diff;
generated_samples = tmp;
- GST_BUFFER_DATA (outbuf) += diff * width * channels;
}
- GST_BUFFER_SIZE (outbuf) = generated_samples * width * channels;
+ gst_buffer_resize (outbuf, diff * width * channels,
+ generated_samples * width * channels);
output_offset = self->nsamples_out - self->latency - generated_samples;
GST_BUFFER_TIMESTAMP (outbuf) =
GST_DEBUG_OBJECT (self, "Pushing buffer of size %d with timestamp: %"
GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
- GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
GST_BUFFER_OFFSET_END (outbuf), generated_samples);
" rate = (int) [ 1, MAX ]," \
" channels = (int) [ 1, MAX ]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_iir_filter_debug, "audiofxbaseiirfilter", 0, "Audio IIR Filter Base Class");
-
-GST_BOILERPLATE_FULL (GstAudioFXBaseIIRFilter,
- gst_audio_fx_base_iir_filter, GstAudioFilter, GST_TYPE_AUDIO_FILTER,
- DEBUG_INIT);
+#define gst_audio_fx_base_iir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFXBaseIIRFilter,
+ gst_audio_fx_base_iir_filter, GST_TYPE_AUDIO_FILTER);
static gboolean gst_audio_fx_base_iir_filter_setup (GstAudioFilter * filter,
GstRingBufferSpec * format);
/* GObject vmethod implementations */
static void
-gst_audio_fx_base_iir_filter_base_init (gpointer klass)
-{
- GstCaps *caps;
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_fx_base_iir_filter_dispose (GObject * object)
{
GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (object);
GObjectClass *gobject_class = (GObjectClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_iir_filter_debug,
+ "audiofxbaseiirfilter", 0, "Audio IIR Filter Base Class");
gobject_class->dispose = gst_audio_fx_base_iir_filter_dispose;
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_fx_base_iir_filter_setup);
trans_class->transform_ip =
}
static void
-gst_audio_fx_base_iir_filter_init (GstAudioFXBaseIIRFilter * filter,
- GstAudioFXBaseIIRFilterClass * klass)
+gst_audio_fx_base_iir_filter_init (GstAudioFXBaseIIRFilter * filter)
{
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
-
if (gst_base_transform_is_passthrough (base))
return GST_FLOW_OK;
g_return_val_if_fail (filter->a != NULL, GST_FLOW_ERROR);
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (filter)->format.width / 8);
+
+ filter->process (filter, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
static guint gst_audio_iir_filter_signals[LAST_SIGNAL] = { 0, };
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_iir_filter_debug, "audioiirfilter", 0, \
- "Generic audio IIR filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioIIRFilter, gst_audio_iir_filter, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_IIR_FILTER, DEBUG_INIT);
+#define gst_audio_iir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioIIRFilter, gst_audio_iir_filter,
+ GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
static void gst_audio_iir_filter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static gboolean gst_audio_iir_filter_setup (GstAudioFilter * base,
GstRingBufferSpec * format);
-/* Element class */
-static void
-gst_audio_iir_filter_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Audio IIR filter", "Filter/Effect/Audio",
- "Generic audio IIR filter with custom filter kernel",
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
static void
gst_audio_iir_filter_class_init (GstAudioIIRFilterClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_iir_filter_debug, "audioiirfilter", 0,
+ "Generic audio IIR filter plugin");
+
gobject_class->set_property = gst_audio_iir_filter_set_property;
gobject_class->get_property = gst_audio_iir_filter_get_property;
gobject_class->finalize = gst_audio_iir_filter_finalize;
g_signal_new ("rate-changed", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstAudioIIRFilterClass, rate_changed),
NULL, NULL, gst_marshal_VOID__INT, G_TYPE_NONE, 1, G_TYPE_INT);
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "Audio IIR filter", "Filter/Effect/Audio",
+ "Generic audio IIR filter with custom filter kernel",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
static void
}
static void
-gst_audio_iir_filter_init (GstAudioIIRFilter * self,
- GstAudioIIRFilterClass * g_class)
+gst_audio_iir_filter_init (GstAudioIIRFilter * self)
{
GValue v = { 0, };
GValueArray *a, *b;
" rate=(int)[1,MAX]," \
" channels=(int)[1,MAX]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_invert_debug, "audioinvert", 0, "audioinvert element");
-
-GST_BOILERPLATE_FULL (GstAudioInvert, gst_audio_invert, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioInvert, gst_audio_invert, GST_TYPE_AUDIO_FILTER);
static void gst_audio_invert_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_invert_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "Audio inversion",
- "Filter/Effect/Audio",
- "Swaps upper and lower half of audio samples",
- "Sebastian Dröge <slomo@circular-chaos.org>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_invert_class_init (GstAudioInvertClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_invert_debug, "audioinvert", 0,
+ "audioinvert element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_invert_set_property;
gobject_class->get_property = gst_audio_invert_get_property;
0.0,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Audio inversion",
+ "Filter/Effect/Audio",
+ "Swaps upper and lower half of audio samples",
+ "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_invert_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_invert_init (GstAudioInvert * filter, GstAudioInvertClass * klass)
+gst_audio_invert_init (GstAudioInvert * filter)
{
filter->degree = 0.0;
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
GstAudioInvert *filter = GST_AUDIO_INVERT (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
-
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (filter)->format.width / 8);
+
+ filter->process (filter, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
" rate=(int)[1,MAX]," \
" channels=(int)[1,MAX]"
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_karaoke_debug, "audiokaraoke", 0, "audiokaraoke element");
-
-GST_BOILERPLATE_FULL (GstAudioKaraoke, gst_audio_karaoke, GstAudioFilter,
- GST_TYPE_AUDIO_FILTER, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioKaraoke, gst_audio_karaoke, GST_TYPE_AUDIO_FILTER);
static void gst_audio_karaoke_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
/* GObject vmethod implementations */
static void
-gst_audio_karaoke_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
- GstCaps *caps;
-
- gst_element_class_set_details_simple (element_class, "AudioKaraoke",
- "Filter/Effect/Audio",
- "Removes voice from sound", "Wim Taymans <wim.taymans@gmail.com>");
-
- caps = gst_caps_from_string (ALLOWED_CAPS);
- gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
- caps);
- gst_caps_unref (caps);
-}
-
-static void
gst_audio_karaoke_class_init (GstAudioKaraokeClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_karaoke_debug, "audiokaraoke", 0,
+ "audiokaraoke element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_karaoke_set_property;
gobject_class->get_property = gst_audio_karaoke_get_property;
"The Frequency width of the filter", 0.0, 100.0, DEFAULT_FILTER_WIDTH,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "AudioKaraoke",
+ "Filter/Effect/Audio",
+ "Removes voice from sound", "Wim Taymans <wim.taymans@gmail.com>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
GST_AUDIO_FILTER_CLASS (klass)->setup =
GST_DEBUG_FUNCPTR (gst_audio_karaoke_setup);
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
}
static void
-gst_audio_karaoke_init (GstAudioKaraoke * filter, GstAudioKaraokeClass * klass)
+gst_audio_karaoke_init (GstAudioKaraoke * filter)
{
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
GstAudioKaraoke *filter = GST_AUDIO_KARAOKE (base);
guint num_samples;
GstClockTime timestamp, stream_time;
+ guint8 *data;
+ gsize size;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
- num_samples =
- GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);
-
if (gst_base_transform_is_passthrough (base) ||
G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
return GST_FLOW_OK;
- filter->process (filter, GST_BUFFER_DATA (buf), num_samples);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ num_samples = size / (GST_AUDIO_FILTER (filter)->format.width / 8);
+
+ filter->process (filter, data, num_samples);
+
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_OK;
}
"width = (int) 16, " "depth = (int) 16, " "signed = (boolean) true")
);
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_panorama_debug, "audiopanorama", 0, "audiopanorama element");
-
-GST_BOILERPLATE_FULL (GstAudioPanorama, gst_audio_panorama, GstBaseTransform,
- GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);
+G_DEFINE_TYPE (GstAudioPanorama, gst_audio_panorama, GST_TYPE_BASE_TRANSFORM);
static void gst_audio_panorama_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GValue * value, GParamSpec * pspec);
static gboolean gst_audio_panorama_get_unit_size (GstBaseTransform * base,
- GstCaps * caps, guint * size);
+ GstCaps * caps, gsize * size);
static GstCaps *gst_audio_panorama_transform_caps (GstBaseTransform * base,
GstPadDirection direction, GstCaps * caps);
static gboolean gst_audio_panorama_set_caps (GstBaseTransform * base,
/* GObject vmethod implementations */
static void
-gst_audio_panorama_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
- gst_element_class_set_details_simple (element_class, "Stereo positioning",
- "Filter/Effect/Audio",
- "Positions audio streams in the stereo panorama",
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_audio_panorama_class_init (GstAudioPanoramaClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_panorama_debug, "audiopanorama", 0,
+ "audiopanorama element");
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
gobject_class->set_property = gst_audio_panorama_set_property;
gobject_class->get_property = gst_audio_panorama_get_property;
GST_TYPE_AUDIO_PANORAMA_METHOD, METHOD_PSYCHOACOUSTIC,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Stereo positioning",
+ "Filter/Effect/Audio",
+ "Positions audio streams in the stereo panorama",
+ "Stefan Kost <ensonic@users.sf.net>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
GST_BASE_TRANSFORM_CLASS (klass)->get_unit_size =
GST_DEBUG_FUNCPTR (gst_audio_panorama_get_unit_size);
GST_BASE_TRANSFORM_CLASS (klass)->transform_caps =
}
static void
-gst_audio_panorama_init (GstAudioPanorama * filter,
- GstAudioPanoramaClass * klass)
+gst_audio_panorama_init (GstAudioPanorama * filter)
{
filter->panorama = 0;
static gboolean
gst_audio_panorama_get_unit_size (GstBaseTransform * base, GstCaps * caps,
- guint * size)
+ gsize * size)
{
gint width, channels;
GstStructure *structure;
GstBuffer * outbuf)
{
GstAudioPanorama *filter = GST_AUDIO_PANORAMA (base);
- guint num_samples = GST_BUFFER_SIZE (outbuf) / (2 * filter->width);
GstClockTime timestamp, stream_time;
+ guint8 *indata, *outdata;
+ gsize insize, outsize;
timestamp = GST_BUFFER_TIMESTAMP (inbuf);
stream_time =
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (G_OBJECT (filter), stream_time);
+ indata = gst_buffer_map (inbuf, &insize, NULL, GST_MAP_READ);
+ outdata = gst_buffer_map (outbuf, &outsize, NULL, GST_MAP_WRITE);
+
if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
- memset (GST_BUFFER_DATA (outbuf), 0, GST_BUFFER_SIZE (outbuf));
- return GST_FLOW_OK;
+ memset (outdata, 0, outsize);
+ } else {
+ guint num_samples = outsize / (2 * filter->width);
+
+ filter->process (filter, indata, outdata, num_samples);
}
- filter->process (filter, GST_BUFFER_DATA (inbuf),
- GST_BUFFER_DATA (outbuf), num_samples);
+ gst_buffer_unmap (inbuf, indata, insize);
+ gst_buffer_unmap (outbuf, outdata, outsize);
return GST_FLOW_OK;
}
return gtype;
}
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_gst_audio_wsincband_debug, "audiowsincband", 0, \
- "Band-pass and Band-reject Windowed sinc filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioWSincBand, gst_audio_wsincband, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_FIR_FILTER, DEBUG_INIT);
+#define gst_audio_wsincband_parent_class parent_class
+G_DEFINE_TYPE (GstAudioWSincBand, gst_audio_wsincband,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
static void gst_audio_wsincband_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
#define POW2(x) (x)*(x)
-/* Element class */
-static void
-gst_audio_wsincband_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Band pass & band reject filter", "Filter/Effect/Audio",
- "Band pass and band reject windowed sinc filter",
- "Thomas Vander Stichele <thomas at apestaart dot org>, "
- "Steven W. Smith, "
- "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
static void
gst_audio_wsincband_class_init (GstAudioWSincBandClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_gst_audio_wsincband_debug, "audiowsincband", 0,
+ "Band-pass and Band-reject Windowed sinc filter plugin");
+
gobject_class->set_property = gst_audio_wsincband_set_property;
gobject_class->get_property = gst_audio_wsincband_get_property;
gobject_class->finalize = gst_audio_wsincband_finalize;
WINDOW_HAMMING,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Band pass & band reject filter", "Filter/Effect/Audio",
+ "Band pass and band reject windowed sinc filter",
+ "Thomas Vander Stichele <thomas at apestaart dot org>, "
+ "Steven W. Smith, "
+ "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_wsincband_setup);
}
static void
-gst_audio_wsincband_init (GstAudioWSincBand * self,
- GstAudioWSincBandClass * g_class)
+gst_audio_wsincband_init (GstAudioWSincBand * self)
{
self->kernel_length = 101;
self->lower_frequency = 0.0;
return gtype;
}
-#define DEBUG_INIT(bla) \
- GST_DEBUG_CATEGORY_INIT (gst_audio_wsinclimit_debug, "audiowsinclimit", 0, \
- "Low-pass and High-pass Windowed sinc filter plugin");
-
-GST_BOILERPLATE_FULL (GstAudioWSincLimit, gst_audio_wsinclimit, GstAudioFilter,
- GST_TYPE_AUDIO_FX_BASE_FIR_FILTER, DEBUG_INIT);
+#define gst_audio_wsinclimit_parent_class parent_class
+G_DEFINE_TYPE (GstAudioWSincLimit, gst_audio_wsinclimit,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
static void gst_audio_wsinclimit_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
#define POW2(x) (x)*(x)
-/* Element class */
-
-static void
-gst_audio_wsinclimit_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class,
- "Low pass & high pass filter", "Filter/Effect/Audio",
- "Low pass and high pass windowed sinc filter",
- "Thomas Vander Stichele <thomas at apestaart dot org>, "
- "Steven W. Smith, "
- "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
- "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
-}
-
static void
gst_audio_wsinclimit_class_init (GstAudioWSincLimitClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (gst_audio_wsinclimit_debug, "audiowsinclimit", 0,
+ "Low-pass and High-pass Windowed sinc filter plugin");
+
gobject_class->set_property = gst_audio_wsinclimit_set_property;
gobject_class->get_property = gst_audio_wsinclimit_get_property;
gobject_class->finalize = gst_audio_wsinclimit_finalize;
WINDOW_HAMMING,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Low pass & high pass filter", "Filter/Effect/Audio",
+ "Low pass and high pass windowed sinc filter",
+ "Thomas Vander Stichele <thomas at apestaart dot org>, "
+ "Steven W. Smith, "
+ "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_wsinclimit_setup);
}
static void
-gst_audio_wsinclimit_init (GstAudioWSincLimit * self,
- GstAudioWSincLimitClass * g_class)
+gst_audio_wsinclimit_init (GstAudioWSincLimit * self)
{
self->mode = MODE_LOW_PASS;
self->window = WINDOW_HAMMING;
gboolean gst_aac_parse_event (GstBaseParse * parse, GstEvent * event);
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT (aacparse_debug, "aacparse", 0, \
- "AAC audio stream parser");
-
-GST_BOILERPLATE_FULL (GstAacParse, gst_aac_parse, GstBaseParse,
- GST_TYPE_BASE_PARSE, _do_init);
+G_DEFINE_TYPE (GstAacParse, gst_aac_parse, GST_TYPE_BASE_PARSE);
static inline gint
gst_aac_parse_get_sample_rate_from_index (guint sr_idx)
}
/**
- * gst_aac_parse_base_init:
- * @klass: #GstElementClass.
+ * gst_aac_parse_class_init:
+ * @klass: #GstAacParseClass.
*
*/
static void
-gst_aac_parse_base_init (gpointer klass)
+gst_aac_parse_class_init (GstAacParseClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (aacparse_debug, "aacparse", 0,
+ "AAC audio stream parser");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
gst_element_class_set_details_simple (element_class,
"AAC audio stream parser", "Codec/Parser/Audio",
"Advanced Audio Coding parser", "Stefan Kost <stefan.kost@nokia.com>");
-}
-
-
-/**
- * gst_aac_parse_class_init:
- * @klass: #GstAacParseClass.
- *
- */
-static void
-gst_aac_parse_class_init (GstAacParseClass * klass)
-{
- GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
parse_class->start = GST_DEBUG_FUNCPTR (gst_aac_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_aac_parse_stop);
*
*/
static void
-gst_aac_parse_init (GstAacParse * aacparse, GstAacParseClass * klass)
+gst_aac_parse_init (GstAacParse * aacparse)
{
GST_DEBUG ("initialized");
}
GstBuffer *buf = gst_value_get_buffer (value);
if (buf) {
- const guint8 *buffer = GST_BUFFER_DATA (buf);
+ guint8 *data;
+ gsize size;
guint sr_idx;
- sr_idx = ((buffer[0] & 0x07) << 1) | ((buffer[1] & 0x80) >> 7);
- aacparse->object_type = (buffer[0] & 0xf8) >> 3;
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+
+ sr_idx = ((data[0] & 0x07) << 1) | ((data[1] & 0x80) >> 7);
+ aacparse->object_type = (data[0] & 0xf8) >> 3;
aacparse->sample_rate = gst_aac_parse_get_sample_rate_from_index (sr_idx);
- aacparse->channels = (buffer[1] & 0x78) >> 3;
+ aacparse->channels = (data[1] & 0x78) >> 3;
aacparse->header_type = DSPAAC_HEADER_NONE;
aacparse->mpegversion = 4;
+ gst_buffer_unmap (buf, data, size);
GST_DEBUG ("codec_data: object_type=%d, sample_rate=%d, channels=%d",
aacparse->object_type, aacparse->sample_rate, aacparse->channels);
gst_aac_parse_check_valid_frame (GstBaseParse * parse,
GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
- const guint8 *data;
+ guint8 *data;
+ gsize size;
GstAacParse *aacparse;
gboolean ret = FALSE;
gboolean lost_sync;
aacparse = GST_AAC_PARSE (parse);
buffer = frame->buffer;
- data = GST_BUFFER_DATA (buffer);
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);
if (aacparse->header_type == DSPAAC_HEADER_ADIF ||
aacparse->header_type == DSPAAC_HEADER_NONE) {
/* There is nothing to parse */
- *framesize = GST_BUFFER_SIZE (buffer);
+ *framesize = size;
ret = TRUE;
} else if (aacparse->header_type == DSPAAC_HEADER_NOT_PARSED || lost_sync) {
- ret = gst_aac_parse_detect_stream (aacparse, data, GST_BUFFER_SIZE (buffer),
+ ret = gst_aac_parse_detect_stream (aacparse, data, size,
GST_BASE_PARSE_DRAINING (parse), framesize, skipsize);
} else if (aacparse->header_type == DSPAAC_HEADER_ADTS) {
guint needed_data = 1024;
- ret = gst_aac_parse_check_adts_frame (aacparse, data,
- GST_BUFFER_SIZE (buffer), GST_BASE_PARSE_DRAINING (parse),
- framesize, &needed_data);
+ ret = gst_aac_parse_check_adts_frame (aacparse, data, size,
+ GST_BASE_PARSE_DRAINING (parse), framesize, &needed_data);
if (!ret) {
GST_DEBUG ("buffer didn't contain valid frame");
GST_DEBUG ("buffer didn't contain valid frame");
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse), 1024);
}
+ gst_buffer_unmap (buffer, data, size);
return ret;
}
GstBuffer *buffer;
GstFlowReturn ret = GST_FLOW_OK;
gint rate, channels;
+ guint8 *data;
+ gsize size;
aacparse = GST_AAC_PARSE (parse);
buffer = frame->buffer;
/* see above */
frame->overhead = 7;
- gst_aac_parse_parse_adts_header (aacparse, GST_BUFFER_DATA (buffer),
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+ gst_aac_parse_parse_adts_header (aacparse, data,
&rate, &channels, NULL, NULL);
+ gst_buffer_unmap (buffer, data, size);
+
GST_LOG_OBJECT (aacparse, "rate: %d, chans: %d", rate, channels);
if (G_UNLIKELY (rate != aacparse->sample_rate
static GstFlowReturn gst_ac3_parse_parse_frame (GstBaseParse * parse,
GstBaseParseFrame * frame);
-GST_BOILERPLATE (GstAc3Parse, gst_ac3_parse, GstBaseParse, GST_TYPE_BASE_PARSE);
+#define gst_ac3_parse_parent_class parent_class
+G_DEFINE_TYPE (GstAc3Parse, gst_ac3_parse, GST_TYPE_BASE_PARSE);
static void
-gst_ac3_parse_base_init (gpointer klass)
+gst_ac3_parse_class_init (GstAc3ParseClass * klass)
{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (ac3_parse_debug, "ac3parse", 0,
+ "AC3 audio stream parser");
+
+ object_class->finalize = gst_ac3_parse_finalize;
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
gst_element_class_set_details_simple (element_class,
"AC3 audio stream parser", "Codec/Parser/Audio",
"AC3 parser", "Tim-Philipp Müller <tim centricular net>");
-}
-
-static void
-gst_ac3_parse_class_init (GstAc3ParseClass * klass)
-{
- GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
- GObjectClass *object_class = G_OBJECT_CLASS (klass);
-
- GST_DEBUG_CATEGORY_INIT (ac3_parse_debug, "ac3parse", 0,
- "AC3 audio stream parser");
-
- object_class->finalize = gst_ac3_parse_finalize;
parse_class->start = GST_DEBUG_FUNCPTR (gst_ac3_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_ac3_parse_stop);
}
static void
-gst_ac3_parse_init (GstAc3Parse * ac3parse, GstAc3ParseClass * klass)
+gst_ac3_parse_init (GstAc3Parse * ac3parse)
{
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (ac3parse), 64 * 2);
gst_ac3_parse_reset (ac3parse);
GstFlowReturn gst_amr_parse_parse_frame (GstBaseParse * parse,
GstBaseParseFrame * frame);
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT (amrparse_debug, "amrparse", 0, \
- "AMR-NB audio stream parser");
-
-GST_BOILERPLATE_FULL (GstAmrParse, gst_amr_parse, GstBaseParse,
- GST_TYPE_BASE_PARSE, _do_init);
-
+G_DEFINE_TYPE (GstAmrParse, gst_amr_parse, GST_TYPE_BASE_PARSE);
/**
- * gst_amr_parse_base_init:
- * @klass: #GstElementClass.
+ * gst_amr_parse_class_init:
+ * @klass: GstAmrParseClass.
*
*/
static void
-gst_amr_parse_base_init (gpointer klass)
+gst_amr_parse_class_init (GstAmrParseClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (amrparse_debug, "amrparse", 0,
+ "AMR-NB audio stream parser");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
"AMR audio stream parser", "Codec/Parser/Audio",
"Adaptive Multi-Rate audio parser",
"Ronald Bultje <rbultje@ronald.bitfreak.net>");
-}
-
-
-/**
- * gst_amr_parse_class_init:
- * @klass: GstAmrParseClass.
- *
- */
-static void
-gst_amr_parse_class_init (GstAmrParseClass * klass)
-{
- GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
parse_class->start = GST_DEBUG_FUNCPTR (gst_amr_parse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_amr_parse_stop);
*
*/
static void
-gst_amr_parse_init (GstAmrParse * amrparse, GstAmrParseClass * klass)
+gst_amr_parse_init (GstAmrParse * amrparse)
{
/* init rest */
gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 62);
GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
GstBuffer *buffer;
- const guint8 *data;
+ guint8 *data;
+ gsize size;
gint fsize, mode, dsize;
GstAmrParse *amrparse;
+ gboolean ret = FALSE;
amrparse = GST_AMR_PARSE (parse);
buffer = frame->buffer;
- data = GST_BUFFER_DATA (buffer);
- dsize = GST_BUFFER_SIZE (buffer);
+
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+ dsize = size;
GST_LOG ("buffer: %d bytes", dsize);
}
/* We return FALSE, so this frame won't get pushed forward. Instead,
the "skip" value is set, so next time we will receive a valid frame. */
- return FALSE;
+ goto done;
}
/* Does this look like a possible frame header candidate? */
(!GST_BASE_PARSE_LOST_SYNC (parse) || GST_BASE_PARSE_DRAINING (parse)
|| (dsize > fsize && (data[fsize] & 0x83) == 0))) {
*framesize = fsize;
- return TRUE;
+ ret = TRUE;
+ goto done;
}
}
-
GST_LOG ("sync lost");
- return FALSE;
+
+done:
+ gst_buffer_unmap (buffer, data, size);
+
+ return ret;
}
static void gst_auto_audio_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoAudioSink, gst_auto_audio_sink, GstBin, GST_TYPE_BIN);
+#define gst_auto_audio_sink_parent_class parent_class
+G_DEFINE_TYPE (GstAutoAudioSink, gst_auto_audio_sink, GST_TYPE_BIN);
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_audio_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (eklass,
- gst_static_pad_template_get (&sink_template));
-
- gst_element_class_set_details_simple (eklass, "Auto audio sink",
- "Sink/Audio",
- "Wrapper audio sink for automatically detected audio sink",
- "Jan Schmidt <thaytan@noraisin.net>");
-}
-
-static void
gst_auto_audio_sink_class_init (GstAutoAudioSinkClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter sink candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (eklass, "Auto audio sink",
+ "Sink/Audio",
+ "Wrapper audio sink for automatically detected audio sink",
+ "Jan Schmidt <thaytan@noraisin.net>");
}
static void
GST_STATIC_CAPS ("audio/x-raw-int; audio/x-raw-float");
static void
-gst_auto_audio_sink_init (GstAutoAudioSink * sink,
- GstAutoAudioSinkClass * g_class)
+gst_auto_audio_sink_init (GstAutoAudioSink * sink)
{
sink->pad = gst_ghost_pad_new_no_target ("sink", GST_PAD_SINK);
gst_element_add_pad (GST_ELEMENT (sink), sink->pad);
* accept only sinks that match with the filter caps */
if (sink->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "sink");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_get_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (sink,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
static void gst_auto_audio_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoAudioSrc, gst_auto_audio_src, GstBin, GST_TYPE_BIN);
+#define gst_auto_audio_src_parent_class parent_class
+G_DEFINE_TYPE (GstAutoAudioSrc, gst_auto_audio_src, GST_TYPE_BIN);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_audio_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (eklass,
- gst_static_pad_template_get (&src_template));
-
- gst_element_class_set_details_simple (eklass, "Auto audio source",
- "Source/Audio",
- "Wrapper audio source for automatically detected audio source",
- "Jan Schmidt <thaytan@noraisin.net>, "
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_auto_audio_src_class_init (GstAutoAudioSrcClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter sink candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (eklass, "Auto audio source",
+ "Source/Audio",
+ "Wrapper audio source for automatically detected audio source",
+ "Jan Schmidt <thaytan@noraisin.net>, "
+ "Stefan Kost <ensonic@users.sf.net>");
}
static void
GST_STATIC_CAPS ("audio/x-raw-int; audio/x-raw-float");
static void
-gst_auto_audio_src_init (GstAutoAudioSrc * src, GstAutoAudioSrcClass * g_class)
+gst_auto_audio_src_init (GstAutoAudioSrc * src)
{
src->pad = gst_ghost_pad_new_no_target ("src", GST_PAD_SRC);
gst_element_add_pad (GST_ELEMENT (src), src->pad);
* accept only sources that match with the filter caps */
if (src->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "src");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_get_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (src,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
static void gst_auto_video_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoVideoSink, gst_auto_video_sink, GstBin, GST_TYPE_BIN);
+#define gst_auto_video_sink_parent_class parent_class
+G_DEFINE_TYPE (GstAutoVideoSink, gst_auto_video_sink, GST_TYPE_BIN);
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_video_sink_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (eklass,
- gst_static_pad_template_get (&sink_template));
- gst_element_class_set_details_simple (eklass, "Auto video sink",
- "Sink/Video",
- "Wrapper video sink for automatically detected video sink",
- "Jan Schmidt <thaytan@noraisin.net>");
-}
-
-static void
gst_auto_video_sink_class_init (GstAutoVideoSinkClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter sink candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_set_details_simple (eklass, "Auto video sink",
+ "Sink/Video",
+ "Wrapper video sink for automatically detected video sink",
+ "Jan Schmidt <thaytan@noraisin.net>");
}
static void
GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb");
static void
-gst_auto_video_sink_init (GstAutoVideoSink * sink,
- GstAutoVideoSinkClass * g_class)
+gst_auto_video_sink_init (GstAutoVideoSink * sink)
{
sink->pad = gst_ghost_pad_new_no_target ("sink", GST_PAD_SINK);
gst_element_add_pad (GST_ELEMENT (sink), sink->pad);
* accept only sinks that match with the filter caps */
if (sink->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "sink");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_get_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (sink,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
static void gst_auto_video_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstAutoVideoSrc, gst_auto_video_src, GstBin, GST_TYPE_BIN);
+#define gst_auto_video_src_parent_class parent_class
+G_DEFINE_TYPE (GstAutoVideoSrc, gst_auto_video_src, GST_TYPE_BIN);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_STATIC_CAPS_ANY);
static void
-gst_auto_video_src_base_init (gpointer klass)
-{
- GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (eklass,
- gst_static_pad_template_get (&src_template));
- gst_element_class_set_details_simple (eklass, "Auto video source",
- "Source/Video",
- "Wrapper video source for automatically detected video source",
- "Jan Schmidt <thaytan@noraisin.net>, "
- "Stefan Kost <ensonic@users.sf.net>");
-}
-
-static void
gst_auto_video_src_class_init (GstAutoVideoSrcClass * klass)
{
GObjectClass *gobject_class;
g_param_spec_boxed ("filter-caps", "Filter caps",
"Filter src candidates using these caps.", GST_TYPE_CAPS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_pad_template (eklass,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_set_details_simple (eklass, "Auto video source",
+ "Source/Video",
+ "Wrapper video source for automatically detected video source",
+ "Jan Schmidt <thaytan@noraisin.net>, "
+ "Stefan Kost <ensonic@users.sf.net>");
}
static void
GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb");
static void
-gst_auto_video_src_init (GstAutoVideoSrc * src, GstAutoVideoSrcClass * g_class)
+gst_auto_video_src_init (GstAutoVideoSrc * src)
{
src->pad = gst_ghost_pad_new_no_target ("src", GST_PAD_SRC);
gst_element_add_pad (GST_ELEMENT (src), src->pad);
* accept only sources that match with the filter caps */
if (src->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "src");
- el_caps = gst_pad_get_caps (el_pad);
+ el_caps = gst_pad_get_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (src,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
GST_STATIC_CAPS ("video/x-msvideo")
);
-static void gst_avi_demux_base_init (GstAviDemuxClass * klass);
-static void gst_avi_demux_class_init (GstAviDemuxClass * klass);
-static void gst_avi_demux_init (GstAviDemux * avi);
static void gst_avi_demux_finalize (GObject * object);
static void gst_avi_demux_reset (GstAviDemux * avi);
static void gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf);
-static GstElementClass *parent_class = NULL;
-
/* GObject methods */
-GType
-gst_avi_demux_get_type (void)
-{
- static GType avi_demux_type = 0;
-
- if (!avi_demux_type) {
- static const GTypeInfo avi_demux_info = {
- sizeof (GstAviDemuxClass),
- (GBaseInitFunc) gst_avi_demux_base_init,
- NULL,
- (GClassInitFunc) gst_avi_demux_class_init,
- NULL,
- NULL,
- sizeof (GstAviDemux),
- 0,
- (GInstanceInitFunc) gst_avi_demux_init,
- };
-
- avi_demux_type =
- g_type_register_static (GST_TYPE_ELEMENT,
- "GstAviDemux", &avi_demux_info, 0);
- }
-
- return avi_demux_type;
-}
+#define gst_avi_demux_parent_class parent_class
+G_DEFINE_TYPE (GstAviDemux, gst_avi_demux, GST_TYPE_ELEMENT);
static void
-gst_avi_demux_base_init (GstAviDemuxClass * klass)
+gst_avi_demux_class_init (GstAviDemuxClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *gobject_class = (GObjectClass *) klass;
GstPadTemplate *videosrctempl, *audiosrctempl, *subsrctempl;
GstCaps *audcaps, *vidcaps, *subcaps;
+ GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux",
+ 0, "Demuxer for AVI streams");
+
+ gobject_class->finalize = gst_avi_demux_finalize;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_avi_demux_change_state);
+ gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_avi_demux_set_index);
+ gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_avi_demux_get_index);
+
audcaps = gst_riff_create_audio_template_caps ();
gst_caps_append (audcaps, gst_caps_new_simple ("audio/x-avi-unknown", NULL));
audiosrctempl = gst_pad_template_new ("audio_%02d",
subcaps = gst_caps_new_simple ("application/x-subtitle-avi", NULL);
subsrctempl = gst_pad_template_new ("subtitle_%02d",
GST_PAD_SRC, GST_PAD_SOMETIMES, subcaps);
- gst_element_class_add_pad_template (element_class, audiosrctempl);
- gst_element_class_add_pad_template (element_class, videosrctempl);
- gst_element_class_add_pad_template (element_class, subsrctempl);
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class, audiosrctempl);
+ gst_element_class_add_pad_template (gstelement_class, videosrctempl);
+ gst_element_class_add_pad_template (gstelement_class, subsrctempl);
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&sink_templ));
- gst_element_class_set_details_simple (element_class, "Avi demuxer",
+
+ gst_element_class_set_details_simple (gstelement_class, "Avi demuxer",
"Codec/Demuxer",
"Demultiplex an avi file into audio and video",
"Erik Walthinsen <omega@cse.ogi.edu>, "
}
static void
-gst_avi_demux_class_init (GstAviDemuxClass * klass)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
- GObjectClass *gobject_class = (GObjectClass *) klass;
-
- GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux",
- 0, "Demuxer for AVI streams");
-
- parent_class = g_type_class_peek_parent (klass);
-
- gobject_class->finalize = gst_avi_demux_finalize;
- gstelement_class->change_state =
- GST_DEBUG_FUNCPTR (gst_avi_demux_change_state);
-
- gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_avi_demux_set_index);
- gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_avi_demux_get_index);
-}
-
-static void
gst_avi_demux_init (GstAviDemux * avi)
{
avi->sinkpad = gst_pad_new_from_static_template (&sink_templ, "sink");
if (gst_adapter_available (avi->adapter) < 8)
return FALSE;
- data = gst_adapter_peek (avi->adapter, 8);
+ data = gst_adapter_map (avi->adapter, 8);
*tag = GST_READ_UINT32_LE (data);
*size = GST_READ_UINT32_LE (data + 4);
+ gst_adapter_unmap (avi->adapter, 0);
return TRUE;
}
GstBuffer * buf, gst_riff_avih ** _avih)
{
gst_riff_avih *avih;
+ gsize size;
if (buf == NULL)
goto no_buffer;
- if (GST_BUFFER_SIZE (buf) < sizeof (gst_riff_avih))
+ size = gst_buffer_get_size (buf);
+ if (size < sizeof (gst_riff_avih))
goto avih_too_small;
- avih = g_memdup (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ avih = g_malloc (size);
+ gst_buffer_extract (buf, 0, avih, size);
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
avih->us_frame = GUINT32_FROM_LE (avih->us_frame);
{
GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
("Too small avih (%d available, %d needed)",
- GST_BUFFER_SIZE (buf), (int) sizeof (gst_riff_avih)));
+        (guint) size, (int) sizeof (gst_riff_avih)));
gst_buffer_unref (buf);
return FALSE;
}
guint16 bpe = 16;
guint32 num, i;
guint64 *indexes;
- guint size;
+ gsize size;
*_indexes = NULL;
- size = buf ? GST_BUFFER_SIZE (buf) : 0;
+ if (buf)
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ else
+ size = 0;
+
if (size < 24)
goto too_small;
- data = GST_BUFFER_DATA (buf);
-
/* check type of index. The opendml2 specs state that
* there should be 4 dwords per array entry. Type can be
* either frame or field (and we don't care). */
indexes[i] = GST_BUFFER_OFFSET_NONE;
*_indexes = indexes;
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return TRUE;
{
GST_ERROR_OBJECT (avi,
"Not enough data to parse superindex (%d available, 24 needed)", size);
- if (buf)
+ if (buf) {
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
+ }
return FALSE;
}
invalid_params:
{
GST_ERROR_OBJECT (avi, "invalid index parameters (num = %d, bpe = %d)",
num, bpe);
- if (buf)
- gst_buffer_unref (buf);
+ gst_buffer_unmap (buf, data, size);
+ gst_buffer_unref (buf);
return FALSE;
}
}
guint16 bpe;
guint32 num, i;
guint64 baseoff;
- guint size;
+ gsize size;
- if (!buf)
+ if (buf == NULL)
return TRUE;
- size = GST_BUFFER_SIZE (buf);
-
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/* check size */
if (size < 24)
goto too_small;
- data = GST_BUFFER_DATA (buf);
-
/* We don't support index-data yet */
if (data[3] & 0x80)
goto not_implemented;
if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
goto out_of_mem;
}
+done:
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return TRUE;
{
GST_ERROR_OBJECT (avi,
"Not enough data to parse subindex (%d available, 24 needed)", size);
- gst_buffer_unref (buf);
- return TRUE; /* continue */
+ goto done; /* continue */
}
not_implemented:
{
GST_ELEMENT_ERROR (avi, STREAM, NOT_IMPLEMENTED, (NULL),
("Subindex-is-data is not implemented"));
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return FALSE;
}
empty_index:
{
GST_DEBUG_OBJECT (avi, "the index is empty");
- gst_buffer_unref (buf);
- return TRUE;
+ goto done; /* continue */
}
out_of_mem:
{
("Cannot allocate memory for %u*%u=%u bytes",
(guint) sizeof (GstAviIndexEntry), num,
(guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return FALSE;
}
{
gst_riff_vprp *vprp;
gint k;
+ gsize size;
g_return_val_if_fail (buf != NULL, FALSE);
g_return_val_if_fail (_vprp != NULL, FALSE);
- if (GST_BUFFER_SIZE (buf) < G_STRUCT_OFFSET (gst_riff_vprp, field_info))
+ size = gst_buffer_get_size (buf);
+
+ if (size < G_STRUCT_OFFSET (gst_riff_vprp, field_info))
goto too_small;
- vprp = g_memdup (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ vprp = g_malloc (size);
+ gst_buffer_extract (buf, 0, vprp, size);
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
vprp->format_token = GUINT32_FROM_LE (vprp->format_token);
/* size checking */
/* calculate fields based on size */
- k = (GST_BUFFER_SIZE (buf) - G_STRUCT_OFFSET (gst_riff_vprp, field_info)) /
- vprp->fields;
+ k = (size - G_STRUCT_OFFSET (gst_riff_vprp, field_info)) / vprp->fields;
if (vprp->fields > k) {
GST_WARNING_OBJECT (element,
"vprp header indicated %d fields, only %d available", vprp->fields, k);
{
GST_ERROR_OBJECT (element,
"Too small vprp (%d available, at least %d needed)",
- GST_BUFFER_SIZE (buf),
- (int) G_STRUCT_OFFSET (gst_riff_vprp, field_info));
+        (guint) size, (int) G_STRUCT_OFFSET (gst_riff_vprp, field_info));
gst_buffer_unref (buf);
return FALSE;
}
static inline void
gst_avi_demux_roundup_list (GstAviDemux * avi, GstBuffer ** buf)
{
- gint size = GST_BUFFER_SIZE (*buf);
+ gsize size;
+
+ size = gst_buffer_get_size (*buf);
if (G_UNLIKELY (size & 1)) {
GstBuffer *obuf;
+ guint8 *data;
- GST_DEBUG_OBJECT (avi, "rounding up dubious list size %d", size);
+ GST_DEBUG_OBJECT (avi, "rounding up dubious list size %" G_GSIZE_FORMAT,
+ size);
obuf = gst_buffer_new_and_alloc (size + 1);
- memcpy (GST_BUFFER_DATA (obuf), GST_BUFFER_DATA (*buf), size);
+
+ data = gst_buffer_map (obuf, NULL, NULL, GST_MAP_WRITE);
+ gst_buffer_extract (*buf, 0, data, size);
/* assume 0 padding, at least makes outcome deterministic */
- (GST_BUFFER_DATA (obuf))[size] = 0;
+ data[size] = 0;
+ gst_buffer_unmap (obuf, data, size + 1);
gst_buffer_replace (buf, obuf);
}
}
case GST_RIFF_TAG_strn:
g_free (stream->name);
if (sub != NULL) {
- stream->name =
- g_strndup ((gchar *) GST_BUFFER_DATA (sub),
- (gsize) GST_BUFFER_SIZE (sub));
+ gchar *bdata;
+ gsize bsize;
+
+ bdata = gst_buffer_map (sub, &bsize, NULL, GST_MAP_READ);
+ stream->name = g_strndup (bdata, bsize);
+ gst_buffer_unmap (sub, bdata, bsize);
gst_buffer_unref (sub);
sub = NULL;
} else {
switch (tag) {
case GST_RIFF_TAG_dmlh:{
gst_riff_dmlh dmlh, *_dmlh;
- guint size;
+ gsize size;
+ guint8 *data;
/* sub == NULL is possible and means an empty buffer */
- size = sub ? GST_BUFFER_SIZE (sub) : 0;
+ if (sub == NULL)
+ goto next;
+
+ data = gst_buffer_map (sub, &size, NULL, GST_MAP_READ);
/* check size */
if (size < sizeof (gst_riff_dmlh)) {
GST_ERROR_OBJECT (avi,
"DMLH entry is too small (%d bytes, %d needed)",
- size, (int) sizeof (gst_riff_dmlh));
+ (int) size, (int) sizeof (gst_riff_dmlh));
+ gst_buffer_unmap (sub, data, size);
goto next;
}
- _dmlh = (gst_riff_dmlh *) GST_BUFFER_DATA (sub);
+ _dmlh = (gst_riff_dmlh *) data;
dmlh.totalframes = GST_READ_UINT32_LE (&_dmlh->totalframes);
+ gst_buffer_unmap (sub, data, size);
GST_INFO_OBJECT (avi, "dmlh tag found: totalframes: %u",
dmlh.totalframes);
gst_avi_demux_parse_index (GstAviDemux * avi, GstBuffer * buf)
{
guint8 *data;
- guint size;
+ gsize size;
guint i, num, n;
gst_riff_index_entry *index;
GstClockTime stamp;
if (!buf)
return FALSE;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
stamp = gst_util_get_timestamp ();
n++;
}
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
/* get stream stats now */
empty_list:
{
GST_DEBUG_OBJECT (avi, "empty index");
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return FALSE;
}
("Cannot allocate memory for %u*%u=%u bytes",
(guint) sizeof (GstAviIndexEntry), num,
(guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return FALSE;
}
GstBuffer *buf;
guint32 tag;
guint32 size;
+ gsize bsize;
+ guint8 *bdata;
GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset);
res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
if (res != GST_FLOW_OK)
goto pull_failed;
- else if (GST_BUFFER_SIZE (buf) < 8)
+
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+ if (bsize < 8)
goto too_small;
/* check tag first before blindy trying to read 'size' bytes */
- tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
- size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 4);
+ tag = GST_READ_UINT32_LE (bdata);
+ size = GST_READ_UINT32_LE (bdata + 4);
if (tag == GST_RIFF_TAG_LIST) {
/* this is the movi tag */
GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT,
(8 + GST_ROUND_UP_2 (size)));
offset += 8 + GST_ROUND_UP_2 (size);
+ gst_buffer_unmap (buf, bdata, bsize);
gst_buffer_unref (buf);
+
res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
if (res != GST_FLOW_OK)
goto pull_failed;
- else if (GST_BUFFER_SIZE (buf) < 8)
+
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+ if (bsize < 8)
goto too_small;
- tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
- size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 4);
+
+ tag = GST_READ_UINT32_LE (bdata);
+ size = GST_READ_UINT32_LE (bdata + 4);
}
+ gst_buffer_unmap (buf, bdata, bsize);
+ gst_buffer_unref (buf);
if (tag != GST_RIFF_TAG_idx1)
goto no_index;
if (!size)
goto zero_index;
- gst_buffer_unref (buf);
-
GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset);
/* read chunk, advance offset */
return;
GST_DEBUG ("will parse index chunk size %u for tag %"
- GST_FOURCC_FORMAT, GST_BUFFER_SIZE (buf), GST_FOURCC_ARGS (tag));
+ GST_FOURCC_FORMAT, (guint) gst_buffer_get_size (buf),
+ GST_FOURCC_ARGS (tag));
gst_avi_demux_parse_index (avi, buf);
too_small:
{
GST_DEBUG_OBJECT (avi, "Buffer is too small");
+ gst_buffer_unmap (buf, bdata, bsize);
gst_buffer_unref (buf);
return;
}
GST_WARNING_OBJECT (avi,
"No index data (idx1) after movi chunk, but %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (tag));
- gst_buffer_unref (buf);
return;
}
zero_index:
{
GST_WARNING_OBJECT (avi, "Empty index data (idx1) after movi chunk");
- gst_buffer_unref (buf);
return;
}
}
offset += 8 + GST_ROUND_UP_2 (size);
GST_DEBUG ("will parse index chunk size %u for tag %"
- GST_FOURCC_FORMAT, GST_BUFFER_SIZE (buf), GST_FOURCC_ARGS (tag));
+ GST_FOURCC_FORMAT, (guint) gst_buffer_get_size (buf),
+ GST_FOURCC_ARGS (tag));
avi->offset = avi->first_movi_offset;
gst_avi_demux_parse_index (avi, buf);
{
GstFlowReturn res = GST_FLOW_OK;
GstBuffer *buf = NULL;
- guint bufsize;
+ gsize bufsize;
guint8 *bufdata;
res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
if (res != GST_FLOW_OK)
goto pull_failed;
- bufsize = GST_BUFFER_SIZE (buf);
+ bufdata = gst_buffer_map (buf, &bufsize, NULL, GST_MAP_READ);
if (bufsize != 8)
goto wrong_size;
- bufdata = GST_BUFFER_DATA (buf);
-
*tag = GST_READ_UINT32_LE (bufdata);
*size = GST_READ_UINT32_LE (bufdata + 4);
*size, offset + 8, offset + 8 + (gint64) * size);
done:
+ gst_buffer_unmap (buf, bufdata, bufsize);
gst_buffer_unref (buf);
return res;
gint64 stop;
gint i;
GstTagList *tags = NULL;
+ guint8 fourcc[4];
GST_DEBUG ("Reading and parsing avi headers: %d", avi->header_state);
GST_DEBUG ("Reading %d bytes", size);
buf = gst_adapter_take_buffer (avi->adapter, size);
- if (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf)) != GST_RIFF_LIST_hdrl)
+ gst_buffer_extract (buf, 0, fourcc, 4);
+
+ if (GST_READ_UINT32_LE (fourcc) != GST_RIFF_LIST_hdrl)
goto header_no_hdrl;
/* mind padding */
switch (tag) {
case GST_RIFF_TAG_LIST:
- if (GST_BUFFER_SIZE (sub) < 4)
+ if (gst_buffer_get_size (sub) < 4)
goto next;
- switch (GST_READ_UINT32_LE (GST_BUFFER_DATA (sub))) {
+ gst_buffer_extract (sub, 0, fourcc, 4);
+
+ switch (GST_READ_UINT32_LE (fourcc)) {
case GST_RIFF_LIST_strl:
if (!(gst_avi_demux_parse_stream (avi, sub))) {
sub = NULL;
default:
GST_WARNING_OBJECT (avi,
"Unknown list %" GST_FOURCC_FORMAT " in AVI header",
- GST_FOURCC_ARGS (GST_READ_UINT32_LE (GST_BUFFER_DATA
- (sub))));
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (fourcc)));
/* fall-through */
case GST_RIFF_TAG_JUNQ:
case GST_RIFF_TAG_JUNK:
goto next;
}
break;
case GST_RIFF_IDIT:
gst_avi_demux_parse_idit (avi, sub);
goto next;
if (gst_adapter_available (avi->adapter) < 12)
return GST_FLOW_OK;
- data = gst_adapter_peek (avi->adapter, 12);
+ data = gst_adapter_map (avi->adapter, 12);
tag = GST_READ_UINT32_LE (data);
size = GST_READ_UINT32_LE (data + 4);
ltag = GST_READ_UINT32_LE (data + 8);
+ gst_adapter_unmap (avi->adapter, 0);
if (tag == GST_RIFF_TAG_LIST) {
switch (ltag) {
static void
gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf)
{
- gchar *data = (gchar *) GST_BUFFER_DATA (buf);
- guint size = GST_BUFFER_SIZE (buf);
+ gchar *data, *ptr;
+ gsize size, left;
gchar *safedata = NULL;
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/*
* According to:
* http://www.eden-foundation.org/products/code/film_date_stamp/index.html
*/
/* skip eventual initial whitespace */
- while (size > 0 && g_ascii_isspace (data[0])) {
- data++;
- size--;
+ ptr = data;
+ left = size;
+
+ while (left > 0 && g_ascii_isspace (ptr[0])) {
+ ptr++;
+ left--;
}
- if (size == 0) {
+ if (left == 0) {
goto non_parsable;
}
/* make a safe copy to add a \0 to the end of the string */
- safedata = g_strndup (data, size);
+ safedata = g_strndup (ptr, left);
/* test if the first char is a alpha or a number */
- if (g_ascii_isdigit (data[0])) {
+ if (g_ascii_isdigit (ptr[0])) {
gst_avi_demux_parse_idit_nums_only (avi, safedata);
g_free (safedata);
+ gst_buffer_unmap (buf, data, size);
return;
- } else if (g_ascii_isalpha (data[0])) {
+ } else if (g_ascii_isalpha (ptr[0])) {
gst_avi_demux_parse_idit_text (avi, safedata);
g_free (safedata);
+ gst_buffer_unmap (buf, data, size);
return;
non_parsable:
GST_WARNING_OBJECT (avi, "IDIT tag has no parsable info");
+ gst_buffer_unmap (buf, data, size);
}
/*
GstElement *element = GST_ELEMENT_CAST (avi);
GstClockTime stamp;
GstTagList *tags = NULL;
+ guint8 fourcc[4];
stamp = gst_util_get_timestamp ();
goto pull_range_failed;
else if (tag != GST_RIFF_TAG_LIST)
goto no_list;
- else if (GST_BUFFER_SIZE (buf) < 4)
+ else if (gst_buffer_get_size (buf) < 4)
goto no_header;
GST_DEBUG_OBJECT (avi, "parsing headers");
/* Find the 'hdrl' LIST tag */
- while (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf)) != GST_RIFF_LIST_hdrl) {
+ gst_buffer_extract (buf, 0, fourcc, 4);
+ while (GST_READ_UINT32_LE (fourcc) != GST_RIFF_LIST_hdrl) {
GST_LOG_OBJECT (avi, "buffer contains %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf))));
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (fourcc)));
/* Eat up */
gst_buffer_unref (buf);
goto pull_range_failed;
else if (tag != GST_RIFF_TAG_LIST)
goto no_list;
- else if (GST_BUFFER_SIZE (buf) < 4)
+ else if (gst_buffer_get_size (buf) < 4)
goto no_header;
+ gst_buffer_extract (buf, 0, fourcc, 4);
}
GST_DEBUG_OBJECT (avi, "hdrl LIST tag found");
/* now, read the elements from the header until the end */
while (gst_riff_parse_chunk (element, buf, &offset, &tag, &sub)) {
+ gsize size;
+ guint8 *data;
+
/* sub can be NULL on empty tags */
if (!sub)
continue;
+ data = gst_buffer_map (sub, &size, NULL, GST_MAP_READ);
+
switch (tag) {
case GST_RIFF_TAG_LIST:
- {
- guint8 *data;
- guint32 fourcc;
-
- if (GST_BUFFER_SIZE (sub) < 4)
+ if (size < 4)
goto next;
- data = GST_BUFFER_DATA (sub);
- fourcc = GST_READ_UINT32_LE (data);
-
- switch (fourcc) {
+ switch (GST_READ_UINT32_LE (data)) {
case GST_RIFF_LIST_strl:
if (!(gst_avi_demux_parse_stream (avi, sub))) {
GST_ELEMENT_WARNING (avi, STREAM, DEMUX, (NULL),
sub = NULL;
break;
case GST_RIFF_LIST_INFO:
- GST_BUFFER_DATA (sub) = data + 4;
- GST_BUFFER_SIZE (sub) -= 4;
+ gst_buffer_resize (sub, 4, -1);
gst_riff_parse_info (element, sub, &tags);
if (tags) {
if (avi->globaltags) {
default:
GST_WARNING_OBJECT (avi,
"Unknown list %" GST_FOURCC_FORMAT " in AVI header",
- GST_FOURCC_ARGS (fourcc));
- GST_MEMDUMP_OBJECT (avi, "Unknown list", GST_BUFFER_DATA (sub),
- GST_BUFFER_SIZE (sub));
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (data)));
+ GST_MEMDUMP_OBJECT (avi, "Unknown list", data, size);
/* fall-through */
case GST_RIFF_TAG_JUNQ:
case GST_RIFF_TAG_JUNK:
goto next;
}
break;
- }
case GST_RIFF_IDIT:
gst_avi_demux_parse_idit (avi, sub);
goto next;
GST_WARNING_OBJECT (avi,
"Unknown tag %" GST_FOURCC_FORMAT " in AVI header at off %d",
GST_FOURCC_ARGS (tag), offset);
- GST_MEMDUMP_OBJECT (avi, "Unknown tag", GST_BUFFER_DATA (sub),
- GST_BUFFER_SIZE (sub));
+ GST_MEMDUMP_OBJECT (avi, "Unknown tag", data, size);
/* fall-through */
case GST_RIFF_TAG_JUNQ:
case GST_RIFF_TAG_JUNK:
next:
- if (sub)
+ if (sub) {
+ gst_buffer_unmap (sub, data, size);
gst_buffer_unref (sub);
+ }
sub = NULL;
break;
}
/* Now, find the data (i.e. skip all junk between header and data) */
do {
guint size;
+ gsize bsize;
guint8 *data;
guint32 tag, ltag;
if (res != GST_FLOW_OK) {
GST_DEBUG_OBJECT (avi, "pull_range failure while looking for tags");
goto pull_range_failed;
- } else if (GST_BUFFER_SIZE (buf) < 12) {
+ } else if (gst_buffer_get_size (buf) < 12) {
GST_DEBUG_OBJECT (avi, "got %d bytes which is less than 12 bytes",
- GST_BUFFER_SIZE (buf));
+ (gint) gst_buffer_get_size (buf));
gst_buffer_unref (buf);
return GST_FLOW_ERROR;
}
- data = GST_BUFFER_DATA (buf);
-
+ data = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
tag = GST_READ_UINT32_LE (data);
size = GST_READ_UINT32_LE (data + 4);
ltag = GST_READ_UINT32_LE (data + 8);
GST_DEBUG ("tag %" GST_FOURCC_FORMAT ", size %u",
GST_FOURCC_ARGS (tag), size);
- GST_MEMDUMP ("Tag content", data, GST_BUFFER_SIZE (buf));
+ GST_MEMDUMP ("Tag content", data, bsize);
+ gst_buffer_unmap (buf, data, bsize);
gst_buffer_unref (buf);
switch (tag) {
GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
goto pull_range_failed;
}
- GST_DEBUG ("got size %u", GST_BUFFER_SIZE (buf));
+ GST_DEBUG ("got size %u", (guint) gst_buffer_get_size (buf));
if (size < 4) {
GST_DEBUG ("skipping INFO LIST prefix");
avi->offset += (4 - GST_ROUND_UP_2 (size));
continue;
}
- sub = gst_buffer_create_sub (buf, 4, GST_BUFFER_SIZE (buf) - 4);
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, 4, -1);
gst_riff_parse_info (element, sub, &tags);
if (tags) {
if (avi->globaltags) {
GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
goto pull_range_failed;
}
- GST_MEMDUMP ("Junk", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ data = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+ GST_MEMDUMP ("Junk", data, bsize);
+ gst_buffer_unmap (buf, data, bsize);
gst_buffer_unref (buf);
}
avi->offset += 8 + GST_ROUND_UP_2 (size);
GstStructure *s;
gint y, w, h;
gint bpp, stride;
- guint8 *tmp = NULL;
+ guint8 *tmp = NULL, *data;
+ gsize size;
if (stream->strh->type != GST_RIFF_FCC_vids)
return buf;
stride = w * (bpp / 8);
buf = gst_buffer_make_writable (buf);
- if (GST_BUFFER_SIZE (buf) < (stride * h)) {
+
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READWRITE);
+ if (size < (stride * h)) {
GST_WARNING ("Buffer is smaller than reported Width x Height x Depth");
+ gst_buffer_unmap (buf, data, size);
return buf;
}
tmp = g_malloc (stride);
for (y = 0; y < h / 2; y++) {
- swap_line (GST_BUFFER_DATA (buf) + stride * y,
- GST_BUFFER_DATA (buf) + stride * (h - 1 - y), tmp, stride);
+ swap_line (data + stride * y, data + stride * (h - 1 - y), tmp, stride);
}
g_free (tmp);
+ gst_buffer_unmap (buf, data, size);
+
return buf;
}
goto pull_failed;
/* check for short buffers, this is EOS as well */
- if (GST_BUFFER_SIZE (buf) < size)
+ if (gst_buffer_get_size (buf) < size)
goto short_buffer;
/* invert the picture if needed */
GST_DEBUG_OBJECT (avi, "Pushing buffer of size %u, ts %"
GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
", off_end %" G_GUINT64_FORMAT,
- GST_BUFFER_SIZE (buf), GST_TIME_ARGS (timestamp),
+ (guint) gst_buffer_get_size (buf), GST_TIME_ARGS (timestamp),
GST_TIME_ARGS (duration), out_offset, out_offset_end);
ret = gst_pad_push (stream->pad, buf);
{
GST_WARNING_OBJECT (avi, "Short read at offset %" G_GUINT64_FORMAT
", only got %d/%" G_GUINT64_FORMAT " bytes (truncated file?)", offset,
- GST_BUFFER_SIZE (buf), size);
+ (gint) gst_buffer_get_size (buf), size);
gst_buffer_unref (buf);
ret = GST_FLOW_UNEXPECTED;
goto beach;
if (size) {
buf = gst_adapter_take_buffer (avi->adapter, GST_ROUND_UP_2 (size));
/* patch the size */
- GST_BUFFER_SIZE (buf) = size;
+ gst_buffer_resize (buf, 0, size);
} else {
buf = NULL;
}
avi->stream[i].discont = TRUE;
}
- GST_DEBUG ("Store %d bytes in adapter", GST_BUFFER_SIZE (buf));
+ GST_DEBUG ("Store %d bytes in adapter", (gint) gst_buffer_get_size (buf));
gst_adapter_push (avi->adapter, buf);
switch (avi->state) {
"wmaversion = (int) [ 1, 2 ] ")
);
-static void gst_avi_mux_base_init (gpointer g_class);
-static void gst_avi_mux_class_init (GstAviMuxClass * klass);
-static void gst_avi_mux_init (GstAviMux * avimux);
static void gst_avi_mux_pad_reset (GstAviPad * avipad, gboolean free);
static GstFlowReturn gst_avi_mux_collect_pads (GstCollectPads * pads,
static GstStateChangeReturn gst_avi_mux_change_state (GstElement * element,
GstStateChange transition);
-static GstElementClass *parent_class = NULL;
-
-GType
-gst_avi_mux_get_type (void)
-{
- static GType avimux_type = 0;
-
- if (!avimux_type) {
- static const GTypeInfo avimux_info = {
- sizeof (GstAviMuxClass),
- gst_avi_mux_base_init,
- NULL,
- (GClassInitFunc) gst_avi_mux_class_init,
- NULL,
- NULL,
- sizeof (GstAviMux),
- 0,
- (GInstanceInitFunc) gst_avi_mux_init,
- };
- static const GInterfaceInfo tag_setter_info = {
- NULL,
- NULL,
- NULL
- };
-
- avimux_type =
- g_type_register_static (GST_TYPE_ELEMENT, "GstAviMux", &avimux_info, 0);
- g_type_add_interface_static (avimux_type, GST_TYPE_TAG_SETTER,
- &tag_setter_info);
- }
- return avimux_type;
-}
-
-static void
-gst_avi_mux_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&audio_sink_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&video_sink_factory));
-
- gst_element_class_set_details_simple (element_class, "Avi muxer",
- "Codec/Muxer",
- "Muxes audio and video into an avi stream",
- "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
-
- GST_DEBUG_CATEGORY_INIT (avimux_debug, "avimux", 0, "Muxer for AVI streams");
-}
+#define gst_avi_mux_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstAviMux, gst_avi_mux, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
static void
gst_avi_mux_finalize (GObject * object)
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
- parent_class = g_type_class_peek_parent (klass);
+ GST_DEBUG_CATEGORY_INIT (avimux_debug, "avimux", 0, "Muxer for AVI streams");
gobject_class->get_property = gst_avi_mux_get_property;
gobject_class->set_property = gst_avi_mux_set_property;
GST_DEBUG_FUNCPTR (gst_avi_mux_request_new_pad);
gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_avi_mux_release_pad);
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_avi_mux_change_state);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&audio_sink_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&video_sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "Avi muxer",
+ "Codec/Muxer",
+ "Muxes audio and video into an avi stream",
+ "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
}
/* reset pad to initial state
avipad->vids_codec_data = gst_value_get_buffer (codec_data);
gst_buffer_ref (avipad->vids_codec_data);
/* keep global track of size */
- avimux->codec_data_size += GST_BUFFER_SIZE (avipad->vids_codec_data);
+ avimux->codec_data_size += gst_buffer_get_size (avipad->vids_codec_data);
} else {
avipad->prepend_buffer =
gst_buffer_ref (gst_value_get_buffer (codec_data));
GstBuffer * buffer)
{
guint8 *data;
- guint size;
+ gsize size;
guint spf;
guint32 header;
gulong layer;
gulong version;
gint lsf, mpg25;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
-
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size < 4)
goto not_parsed;
GST_WARNING_OBJECT (avimux, "input mpeg audio has varying frame size");
goto cbr_fallback;
}
+done:
+ gst_buffer_unmap (buffer, data, size);
return GST_FLOW_OK;
avipad->hdr.scale = 1;
/* no need to check further */
avipad->hook = NULL;
- return GST_FLOW_OK;
+ goto done;
}
}
avipad->auds_codec_data = gst_value_get_buffer (codec_data);
gst_buffer_ref (avipad->auds_codec_data);
/* keep global track of size */
- avimux->codec_data_size += GST_BUFFER_SIZE (avipad->auds_codec_data);
+ avimux->codec_data_size += gst_buffer_get_size (avipad->auds_codec_data);
}
if (!strcmp (mimetype, "audio/x-raw-int")) {
GstBuffer *codec_data_buf = avipad->auds_codec_data;
const gchar *stream_format;
guint codec;
+ guint8 data[2];
stream_format = gst_structure_get_string (structure, "stream-format");
if (stream_format) {
}
/* vbr case needs some special handling */
- if (!codec_data_buf || GST_BUFFER_SIZE (codec_data_buf) < 2) {
+ if (!codec_data_buf || gst_buffer_get_size (codec_data_buf) < 2) {
GST_WARNING_OBJECT (avimux, "no (valid) codec_data for AAC audio");
break;
}
avipad->auds.format = GST_RIFF_WAVE_FORMAT_AAC;
/* need to determine frame length */
- codec = GST_READ_UINT16_BE (GST_BUFFER_DATA (codec_data_buf));
+ gst_buffer_extract (codec_data_buf, 0, data, 2);
+ codec = GST_READ_UINT16_BE (data);
avipad->parent.hdr.scale = (codec & 0x4) ? 960 : 1024;
break;
}
GstByteWriter bw;
GSList *node;
guint avih, riff, hdrl;
+ guint8 *bdata;
+ gsize bsize;
GST_DEBUG_OBJECT (avimux, "creating avi header, data_size %u, idx_size %u",
avimux->data_size, avimux->idx_size);
if (avipad->is_video) {
codec_size = vidpad->vids_codec_data ?
- GST_BUFFER_SIZE (vidpad->vids_codec_data) : 0;
+ gst_buffer_get_size (vidpad->vids_codec_data) : 0;
/* the video header */
strf = gst_avi_mux_start_chunk (&bw, "strf", 0);
/* the actual header */
gst_byte_writer_put_uint32_le (&bw, vidpad->vids.num_colors);
gst_byte_writer_put_uint32_le (&bw, vidpad->vids.imp_colors);
if (vidpad->vids_codec_data) {
- gst_byte_writer_put_data (&bw,
- GST_BUFFER_DATA (vidpad->vids_codec_data),
- GST_BUFFER_SIZE (vidpad->vids_codec_data));
+ bdata =
+ gst_buffer_map (vidpad->vids_codec_data, &bsize, NULL,
+ GST_MAP_READ);
+ gst_byte_writer_put_data (&bw, bdata, bsize);
+ gst_buffer_unmap (vidpad->vids_codec_data, bdata, bsize);
}
gst_avi_mux_end_chunk (&bw, strf);
}
} else {
codec_size = audpad->auds_codec_data ?
- GST_BUFFER_SIZE (audpad->auds_codec_data) : 0;
+ gst_buffer_get_size (audpad->auds_codec_data) : 0;
/* the audio header */
strf = gst_avi_mux_start_chunk (&bw, "strf", 0);
/* the actual header */
gst_byte_writer_put_uint16_le (&bw, audpad->auds.size);
gst_byte_writer_put_uint16_le (&bw, codec_size);
if (audpad->auds_codec_data) {
- gst_byte_writer_put_data (&bw,
- GST_BUFFER_DATA (audpad->auds_codec_data),
- GST_BUFFER_SIZE (audpad->auds_codec_data));
+ bdata =
+ gst_buffer_map (audpad->auds_codec_data, &bsize, NULL,
+ GST_MAP_READ);
+ gst_byte_writer_put_data (&bw, bdata, bsize);
+ gst_buffer_unmap (audpad->auds_codec_data, bdata, bsize);
}
gst_avi_mux_end_chunk (&bw, strf);
}
buffer = gst_byte_writer_reset_and_get_buffer (&bw);
/* ... but RIFF includes more than just header */
- size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buffer) + 4);
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READWRITE);
+ size = GST_READ_UINT32_LE (bdata + 4);
size += 8 + avimux->data_size + avimux->idx_size;
- GST_WRITE_UINT32_LE (GST_BUFFER_DATA (buffer) + 4, size);
+ GST_WRITE_UINT32_LE (bdata + 4, size);
- GST_MEMDUMP_OBJECT (avimux, "avi header", GST_BUFFER_DATA (buffer),
- GST_BUFFER_SIZE (buffer));
+ GST_MEMDUMP_OBJECT (avimux, "avi header", bdata, bsize);
+ gst_buffer_unmap (buffer, bdata, bsize);
return buffer;
}
gst_avi_mux_riff_get_avix_header (guint32 datax_size)
{
GstBuffer *buffer;
- guint8 *buffdata;
+ guint8 *bdata;
+ gsize bsize;
buffer = gst_buffer_new_and_alloc (24);
- buffdata = GST_BUFFER_DATA (buffer);
- memcpy (buffdata + 0, "RIFF", 4);
- GST_WRITE_UINT32_LE (buffdata + 4, datax_size + 3 * 4);
- memcpy (buffdata + 8, "AVIX", 4);
- memcpy (buffdata + 12, "LIST", 4);
- GST_WRITE_UINT32_LE (buffdata + 16, datax_size);
- memcpy (buffdata + 20, "movi", 4);
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_WRITE);
+ memcpy (bdata + 0, "RIFF", 4);
+ GST_WRITE_UINT32_LE (bdata + 4, datax_size + 3 * 4);
+ memcpy (bdata + 8, "AVIX", 4);
+ memcpy (bdata + 12, "LIST", 4);
+ GST_WRITE_UINT32_LE (bdata + 16, datax_size);
+ memcpy (bdata + 20, "movi", 4);
+ gst_buffer_unmap (buffer, bdata, bsize);
return buffer;
}
gst_avi_mux_riff_get_header (GstAviPad * avipad, guint32 video_frame_size)
{
GstBuffer *buffer;
- guint8 *buffdata;
+ guint8 *bdata;
+ gsize bsize;
buffer = gst_buffer_new_and_alloc (8);
- buffdata = GST_BUFFER_DATA (buffer);
- memcpy (buffdata + 0, avipad->tag, 4);
- GST_WRITE_UINT32_LE (buffdata + 4, video_frame_size);
+
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_WRITE);
+ memcpy (bdata + 0, avipad->tag, 4);
+ GST_WRITE_UINT32_LE (bdata + 4, video_frame_size);
+ gst_buffer_unmap (buffer, bdata, bsize);
return buffer;
}
{
GstFlowReturn res;
GstBuffer *buffer;
- guint8 *buffdata, *data;
+ guint8 *data;
gst_riff_index_entry *entry;
gint i;
guint32 size, entry_count;
gboolean is_pcm = FALSE;
guint32 pcm_samples = 0;
+ guint8 *bdata;
+ gsize bsize;
/* check if it is pcm */
if (avipad && !avipad->is_video) {
/* allocate the maximum possible */
buffer = gst_buffer_new_and_alloc (32 + 8 * avimux->idx_index);
- buffdata = GST_BUFFER_DATA (buffer);
+
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_WRITE);
+ data = bdata;
/* general index chunk info */
- memcpy (buffdata + 0, chunk, 4); /* chunk id */
- GST_WRITE_UINT32_LE (buffdata + 4, 0); /* chunk size; fill later */
- GST_WRITE_UINT16_LE (buffdata + 8, 2); /* index entry is 2 words */
- buffdata[10] = 0; /* index subtype */
- buffdata[11] = GST_AVI_INDEX_OF_CHUNKS; /* index type: AVI_INDEX_OF_CHUNKS */
- GST_WRITE_UINT32_LE (buffdata + 12, 0); /* entries in use; fill later */
- memcpy (buffdata + 16, code, 4); /* stream to which index refers */
- GST_WRITE_UINT64_LE (buffdata + 20, avimux->avix_start); /* base offset */
- GST_WRITE_UINT32_LE (buffdata + 28, 0); /* reserved */
- buffdata += 32;
+ memcpy (bdata + 0, chunk, 4); /* chunk id */
+ GST_WRITE_UINT32_LE (bdata + 4, 0); /* chunk size; fill later */
+ GST_WRITE_UINT16_LE (bdata + 8, 2); /* index entry is 2 words */
+ bdata[10] = 0; /* index subtype */
+ bdata[11] = GST_AVI_INDEX_OF_CHUNKS; /* index type: AVI_INDEX_OF_CHUNKS */
+ GST_WRITE_UINT32_LE (bdata + 12, 0); /* entries in use; fill later */
+ memcpy (bdata + 16, code, 4); /* stream to which index refers */
+ GST_WRITE_UINT64_LE (bdata + 20, avimux->avix_start); /* base offset */
+ GST_WRITE_UINT32_LE (bdata + 28, 0); /* reserved */
+ bdata += 32;
/* now the actual index entries */
i = avimux->idx_index;
while (i > 0) {
if (memcmp (&entry->id, code, 4) == 0) {
/* enter relative offset to the data (!) */
- GST_WRITE_UINT32_LE (buffdata, GUINT32_FROM_LE (entry->offset) + 8);
+ GST_WRITE_UINT32_LE (bdata, GUINT32_FROM_LE (entry->offset) + 8);
/* msb is set if not (!) keyframe */
- GST_WRITE_UINT32_LE (buffdata + 4, GUINT32_FROM_LE (entry->size)
+ GST_WRITE_UINT32_LE (bdata + 4, GUINT32_FROM_LE (entry->size)
| (GUINT32_FROM_LE (entry->flags)
& GST_RIFF_IF_KEYFRAME ? 0 : 1U << 31));
- buffdata += 8;
+ bdata += 8;
}
i--;
entry++;
}
/* ok, now we know the size and no of entries, fill in where needed */
- data = GST_BUFFER_DATA (buffer);
- GST_BUFFER_SIZE (buffer) = size = buffdata - data;
+ size = bdata - data;
GST_WRITE_UINT32_LE (data + 4, size - 8);
entry_count = (size - 32) / 8;
GST_WRITE_UINT32_LE (data + 12, entry_count);
+ gst_buffer_unmap (buffer, data, size);
/* decorate and send */
gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
GstFlowReturn res;
GstBuffer *buffer;
guint8 *buffdata;
+ gsize buffsize;
buffer = gst_buffer_new_and_alloc (8);
- buffdata = GST_BUFFER_DATA (buffer);
+
+ buffdata = gst_buffer_map (buffer, &buffsize, NULL, GST_MAP_WRITE);
memcpy (buffdata + 0, "idx1", 4);
GST_WRITE_UINT32_LE (buffdata + 4,
avimux->idx_index * sizeof (gst_riff_index_entry));
+ gst_buffer_unmap (buffer, buffdata, buffsize);
gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
res = gst_pad_push (avimux->srcpad, buffer);
return res;
buffer = gst_buffer_new ();
- GST_BUFFER_SIZE (buffer) = avimux->idx_index * sizeof (gst_riff_index_entry);
- GST_BUFFER_DATA (buffer) = (guint8 *) avimux->idx;
- GST_BUFFER_MALLOCDATA (buffer) = GST_BUFFER_DATA (buffer);
+
+ buffsize = avimux->idx_index * sizeof (gst_riff_index_entry);
+ buffdata = (guint8 *) avimux->idx;
avimux->idx = NULL; /* will be free()'ed by gst_buffer_unref() */
- avimux->total_data += GST_BUFFER_SIZE (buffer) + 8;
+
+ gst_buffer_take_memory (buffer,
+ gst_memory_new_wrapped (0, buffdata, g_free, buffsize, 0, buffsize));
+
+ avimux->total_data += buffsize + 8;
gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
res = gst_pad_push (avimux->srcpad, buffer);
}
header = gst_avi_mux_riff_get_avix_header (0);
- avimux->total_data += GST_BUFFER_SIZE (header);
+ avimux->total_data += gst_buffer_get_size (header);
/* avix_start is used as base offset for the odml index chunk */
avimux->idx_offset = avimux->total_data - avimux->avix_start;
gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
avimux->is_bigfile = FALSE;
header = gst_avi_mux_riff_get_avi_header (avimux);
- avimux->total_data += GST_BUFFER_SIZE (header);
+ avimux->total_data += gst_buffer_get_size (header);
gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
res = gst_pad_push (avimux->srcpad, header);
gst_avi_mux_send_pad_data (GstAviMux * avimux, gulong num_bytes)
{
GstBuffer *buffer;
+ guint8 *bdata;
+ gsize bsize;
buffer = gst_buffer_new_and_alloc (num_bytes);
- memset (GST_BUFFER_DATA (buffer), 0, num_bytes);
+
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_WRITE);
+ memset (bdata, 0, num_bytes);
+ gst_buffer_unmap (buffer, bdata, bsize);
gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
return gst_pad_push (avimux->srcpad, buffer);
}
GstBuffer *data, *header;
gulong total_size, pad_bytes = 0;
guint flags;
+ gsize datasize;
data = gst_collect_pads_pop (avimux->collect, avipad->collect);
/* arrange downstream running time */
- data = gst_buffer_make_metadata_writable (data);
+ data = gst_buffer_make_writable (data);
GST_BUFFER_TIMESTAMP (data) =
gst_segment_to_running_time (&avipad->collect->segment,
GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (data));
if (vidpad->prepend_buffer) {
GstBuffer *newdata = gst_buffer_merge (vidpad->prepend_buffer, data);
- gst_buffer_copy_metadata (newdata, data, GST_BUFFER_COPY_TIMESTAMPS);
+ gst_buffer_copy_into (newdata, data, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
gst_buffer_unref (data);
gst_buffer_unref (vidpad->prepend_buffer);
return res;
}
+ datasize = gst_buffer_get_size (data);
+
/* need to restart or start a next avix chunk ? */
if ((avimux->is_bigfile ? avimux->datax_size : avimux->data_size) +
- GST_BUFFER_SIZE (data) > GST_AVI_MAX_SIZE) {
+ datasize > GST_AVI_MAX_SIZE) {
if (avimux->enable_large_avi) {
if ((res = gst_avi_mux_bigfile (avimux, FALSE)) != GST_FLOW_OK)
return res;
}
/* get header and record some stats */
- if (GST_BUFFER_SIZE (data) & 1) {
- pad_bytes = 2 - (GST_BUFFER_SIZE (data) & 1);
+ if (datasize & 1) {
+ pad_bytes = 2 - (datasize & 1);
}
- header = gst_avi_mux_riff_get_header (avipad, GST_BUFFER_SIZE (data));
- total_size = GST_BUFFER_SIZE (header) + GST_BUFFER_SIZE (data) + pad_bytes;
+ header = gst_avi_mux_riff_get_header (avipad, datasize);
+ total_size = gst_buffer_get_size (header) + datasize + pad_bytes;
if (avimux->is_bigfile) {
avimux->datax_size += total_size;
avipad->hook (avimux, avipad, data);
/* the suggested buffer size is the max frame size */
- if (avipad->hdr.bufsize < GST_BUFFER_SIZE (data))
- avipad->hdr.bufsize = GST_BUFFER_SIZE (data);
+ if (avipad->hdr.bufsize < datasize)
+ avipad->hdr.bufsize = datasize;
if (avipad->is_video) {
avimux->total_frames++;
GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;
flags = 0;
- audpad->audio_size += GST_BUFFER_SIZE (data);
+ audpad->audio_size += datasize;
audpad->audio_time += GST_BUFFER_DURATION (data);
}
- gst_avi_mux_add_index (avimux, avipad, flags, GST_BUFFER_SIZE (data));
+ gst_avi_mux_add_index (avimux, avipad, flags, datasize);
/* prepare buffers for sending */
gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
- data = gst_buffer_make_metadata_writable (data);
+ data = gst_buffer_make_writable (data);
gst_buffer_set_caps (data, GST_PAD_CAPS (avimux->srcpad));
GST_LOG_OBJECT (avimux, "pushing buffers: head, data");
static gboolean gst_avi_subtitle_send_event (GstElement * element,
GstEvent * event);
-GST_BOILERPLATE (GstAviSubtitle, gst_avi_subtitle, GstElement,
- GST_TYPE_ELEMENT);
+#define gst_avi_subtitle_parent_class parent_class
+G_DEFINE_TYPE (GstAviSubtitle, gst_avi_subtitle, GST_TYPE_ELEMENT);
#define IS_BOM_UTF8(data) ((GST_READ_UINT32_BE(data) >> 8) == 0xEFBBBF)
#define IS_BOM_UTF16_BE(data) (GST_READ_UINT16_BE(data) == 0xFEFF)
{
const gchar *input_enc = NULL;
GstBuffer *ret = NULL;
- gchar *data;
+ gchar *data, *bdata;
+ gsize bsize;
- data = (gchar *) GST_BUFFER_DATA (buffer) + offset;
+ bdata = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READ);
+ data = bdata + offset;
if (len >= (3 + 1) && IS_BOM_UTF8 (data) &&
g_utf8_validate (data + 3, len - 3, NULL)) {
- ret = gst_buffer_create_sub (buffer, offset + 3, len - 3);
+ ret =
+ gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset + 3,
+ len - 3);
} else if (len >= 2 && IS_BOM_UTF16_BE (data)) {
input_enc = "UTF-16BE";
data += 2;
len -= 4;
} else if (g_utf8_validate (data, len, NULL)) {
/* not specified, check if it's UTF-8 */
- ret = gst_buffer_create_sub (buffer, offset, len);
+ ret = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset, len);
} else {
/* we could fall back to gst_tag_freeform_to_utf8() here */
GST_WARNING_OBJECT (sub, "unspecified encoding, and not UTF-8");
- return NULL;
+ ret = NULL;
+ goto done;
}
g_return_val_if_fail (ret != NULL || input_enc != NULL, NULL);
if (input_enc) {
GError *err = NULL;
gchar *utf8;
+ gsize slen;
GST_DEBUG_OBJECT (sub, "converting subtitles from %s to UTF-8", input_enc);
utf8 = g_convert (data, len, "UTF-8", input_enc, NULL, NULL, &err);
if (err != NULL) {
GST_WARNING_OBJECT (sub, "conversion to UTF-8 failed : %s", err->message);
g_error_free (err);
- return NULL;
+ ret = NULL;
+ goto done;
}
ret = gst_buffer_new ();
- GST_BUFFER_DATA (ret) = (guint8 *) utf8;
- GST_BUFFER_MALLOCDATA (ret) = (guint8 *) utf8;
- GST_BUFFER_SIZE (ret) = strlen (utf8);
+ slen = strlen (utf8);
+ gst_buffer_take_memory (ret,
+ gst_memory_new_wrapped (0, utf8, g_free, slen, 0, slen));
+
GST_BUFFER_OFFSET (ret) = 0;
}
-
GST_BUFFER_CAPS (ret) = gst_caps_new_simple ("application/x-subtitle", NULL);
+
+done:
+ gst_buffer_unmap (buffer, bdata, bsize);
+
return ret;
}
static GstFlowReturn
gst_avi_subtitle_parse_gab2_chunk (GstAviSubtitle * sub, GstBuffer * buf)
{
- const guint8 *data;
+ guint8 *data;
gchar *name_utf8;
guint name_length;
guint file_length;
- guint size;
+ gsize size;
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/* check the magic word "GAB2\0", and the next word must be 2 */
if (size < 12 || memcmp (data, "GAB2\0\2\0", 5 + 2) != 0)
if (sub->subfile == NULL)
goto extract_failed;
+ gst_buffer_unmap (buf, data, size);
+
return GST_FLOW_OK;
/* ERRORS */
wrong_magic_word:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL), ("Wrong magic word"));
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_ERROR;
}
wrong_name_length:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
("name doesn't fit in buffer (%d < %d)", size, 17 + name_length));
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_ERROR;
}
wrong_fixed_word_2:
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
("wrong fixed word: expected %u, got %u", 4,
GST_READ_UINT16_LE (data + 11 + name_length)));
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_ERROR;
}
wrong_total_length:
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
("buffer size is wrong: need %d bytes, have %d bytes",
17 + name_length + file_length, size));
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_ERROR;
}
extract_failed:
{
GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
("could not extract subtitles"));
+ gst_buffer_unmap (buf, data, size);
return GST_FLOW_ERROR;
}
}
}
static void
-gst_avi_subtitle_base_init (gpointer klass)
+gst_avi_subtitle_class_init (GstAviSubtitleClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GST_DEBUG_CATEGORY_INIT (avisubtitle_debug, "avisubtitle", 0,
"parse avi subtitle stream");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_avi_subtitle_change_state);
+ gstelement_class->send_event =
+ GST_DEBUG_FUNCPTR (gst_avi_subtitle_send_event);
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&src_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"Avi subtitle parser", "Codec/Parser/Subtitle",
"Parse avi subtitle stream", "Thijs Vermeir <thijsvermeir@gmail.com>");
}
static void
-gst_avi_subtitle_class_init (GstAviSubtitleClass * klass)
-{
- GstElementClass *gstelement_class = (GstElementClass *) klass;
-
- gstelement_class->change_state =
- GST_DEBUG_FUNCPTR (gst_avi_subtitle_change_state);
- gstelement_class->send_event =
- GST_DEBUG_FUNCPTR (gst_avi_subtitle_send_event);
-}
-
-static void
-gst_avi_subtitle_init (GstAviSubtitle * self, GstAviSubtitleClass * klass)
+gst_avi_subtitle_init (GstAviSubtitle * self)
{
GstCaps *caps;
static GstElementClass *parent_class = NULL;
-typedef struct _GstFencedBuffer GstFencedBuffer;
-struct _GstFencedBuffer
+typedef struct _GstMetaFenced
{
- GstBuffer buffer;
+ GstMeta meta;
+
void *region;
unsigned int length;
-};
+} GstMetaFenced;
+
+static const GstMetaInfo *
+gst_meta_fenced_get_info (void)
+{
+ static const GstMetaInfo *meta_fenced_info = NULL;
+
+ if (meta_fenced_info == NULL) {
+ meta_fenced_info = gst_meta_register ("GstMetaFenced", "GstMetaFenced",
+ sizeof (GstMetaFenced),
+ (GstMetaInitFunction) NULL,
+ (GstMetaFreeFunction) NULL,
+ (GstMetaTransformFunction) NULL,
+ (GstMetaSerializeFunction) NULL, (GstMetaDeserializeFunction) NULL);
+ }
+ return meta_fenced_info;
+}
-GType gst_fenced_buffer_get_type (void);
-static void gst_fenced_buffer_finalize (GstFencedBuffer * buf);
-static GstFencedBuffer *gst_fenced_buffer_copy (const GstBuffer * buffer);
+#define GST_META_FENCED_GET(buf) ((GstMetaFenced *)gst_buffer_get_meta(buf,gst_meta_fenced_get_info()))
+#define GST_META_FENCED_ADD(buf) ((GstMetaFenced *)gst_buffer_add_meta(buf,gst_meta_fenced_get_info(),NULL))
+
+static void gst_fenced_buffer_dispose (GstBuffer * buf);
+static GstBuffer *gst_fenced_buffer_copy (const GstBuffer * buffer);
static void *gst_fenced_buffer_alloc (GstBuffer * buffer, unsigned int length,
gboolean fence_top);
+#if 0
static GstFlowReturn gst_efence_buffer_alloc (GstPad * pad, guint64 offset,
guint size, GstCaps * caps, GstBuffer ** buf);
-
-#define GST_TYPE_FENCED_BUFFER (gst_fenced_buffer_get_type())
-
-#define GST_IS_FENCED_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_FENCED_BUFFER))
-#define GST_FENCED_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_FENCED_BUFFER, GstFencedBuffer))
+#endif
GType
gst_gst_efence_get_type (void)
GST_DEBUG_FUNCPTR (gst_pad_proxy_setcaps));
gst_pad_set_chain_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_efence_chain));
+#if 0
gst_pad_set_bufferalloc_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_efence_buffer_alloc));
+#endif
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
filter->srcpad =
efence = GST_EFENCE (GST_OBJECT_PARENT (pad));
g_return_val_if_fail (GST_IS_EFENCE (efence), GST_FLOW_ERROR);
+#if 0
if (GST_IS_FENCED_BUFFER (buffer)) {
GST_DEBUG_OBJECT (efence, "Passing on existing fenced buffer with caps %"
GST_PTR_FORMAT, GST_BUFFER_CAPS (buffer));
return gst_pad_push (efence->srcpad, buffer);
}
+#endif
copy = (GstBuffer *) gst_fenced_buffer_copy (buffer);
return gst_pad_activate_pull (efence->sinkpad, active);
}
+#if 0
static GstFlowReturn
gst_efence_buffer_alloc (GstPad * pad, guint64 offset,
guint size, GstCaps * caps, GstBuffer ** buf)
return GST_FLOW_OK;
}
+#endif
static void
gst_efence_set_property (GObject * object, guint prop_id,
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
-static GstBufferClass *fenced_buffer_parent_class = NULL;
-
static void
-gst_fenced_buffer_finalize (GstFencedBuffer * buffer)
+gst_fenced_buffer_dispose (GstBuffer * buffer)
{
- GstFencedBuffer *fenced_buffer;
+ GstMetaFenced *meta;
- GST_DEBUG ("free buffer=%p", buffer);
+ meta = GST_META_FENCED_GET (buffer);
- fenced_buffer = GST_FENCED_BUFFER (buffer);
+ GST_DEBUG ("free buffer=%p", buffer);
/* free our data */
if (GST_BUFFER_DATA (buffer)) {
- GST_DEBUG ("free region %p %d", fenced_buffer->region,
- fenced_buffer->length);
- munmap (fenced_buffer->region, fenced_buffer->length);
+ GST_DEBUG ("free region %p %d", meta->region, meta->length);
+ munmap (meta->region, meta->length);
}
-
- GST_MINI_OBJECT_CLASS (fenced_buffer_parent_class)->finalize (GST_MINI_OBJECT
- (buffer));
}
-static GstFencedBuffer *
+static GstBuffer *
gst_fenced_buffer_copy (const GstBuffer * buffer)
{
GstBuffer *copy;
g_return_val_if_fail (buffer != NULL, NULL);
/* create a fresh new buffer */
- copy = (GstBuffer *) gst_mini_object_new (GST_TYPE_FENCED_BUFFER);
+ copy = gst_buffer_new ();
/* we simply copy everything from our parent */
- ptr = gst_fenced_buffer_alloc (GST_BUFFER (copy),
- GST_BUFFER_SIZE (buffer), TRUE);
+ ptr = gst_fenced_buffer_alloc (copy, GST_BUFFER_SIZE (buffer), TRUE);
memcpy (ptr, GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
/* copy relevant flags */
", caps: %" GST_PTR_FORMAT, buffer,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (copy)), GST_BUFFER_CAPS (copy));
- return GST_FENCED_BUFFER (copy);
+ return copy;
}
void *
{
int alloc_size;
void *region;
- GstFencedBuffer *fenced_buffer = (GstFencedBuffer *) buffer;
+ GstMetaFenced *meta;
int page_size;
GST_DEBUG ("buffer=%p length=%d fence_top=%d", buffer, length, fence_top);
g_warning ("mmap failed");
return NULL;
}
+
+ GST_MINI_OBJECT_CAST (buffer)->dispose =
+ (GstMiniObjectDisposeFunction) gst_fenced_buffer_dispose;
+ GST_MINI_OBJECT_CAST (buffer)->copy =
+ (GstMiniObjectCopyFunction) gst_fenced_buffer_copy;
+
+ meta = GST_META_FENCED_ADD (buffer);
+
#if 0
munmap (region, page_size);
munmap (region + alloc_size - page_size, page_size);
- fenced_buffer->region = region + page_size;
- fenced_buffer->length = alloc_size - page_size;
+ meta->region = region + page_size;
+ meta->length = alloc_size - page_size;
#else
mprotect (region, page_size, PROT_NONE);
mprotect ((char *) region + alloc_size - page_size, page_size, PROT_NONE);
- fenced_buffer->region = region;
- fenced_buffer->length = alloc_size;
+ meta->region = region;
+ meta->length = alloc_size;
#endif
- GST_DEBUG ("new region %p %d", fenced_buffer->region, fenced_buffer->length);
+ GST_DEBUG ("new region %p %d", meta->region, meta->length);
if (fence_top) {
int offset;
return (void *) ((char *) region + page_size);
}
}
-
-static void
-gst_fenced_buffer_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
- fenced_buffer_parent_class = g_type_class_peek_parent (g_class);
-
- mini_object_class->finalize =
- (GstMiniObjectFinalizeFunction) gst_fenced_buffer_finalize;
- mini_object_class->copy = (GstMiniObjectCopyFunction) gst_fenced_buffer_copy;
-}
-
-GType
-gst_fenced_buffer_get_type (void)
-{
- static GType fenced_buf_type = 0;
-
- if (G_UNLIKELY (!fenced_buf_type)) {
- static const GTypeInfo fenced_buf_info = {
- sizeof (GstBufferClass),
- NULL,
- NULL,
- (GClassInitFunc) gst_fenced_buffer_class_init,
- NULL,
- NULL,
- sizeof (GstFencedBuffer),
- 0,
- NULL,
- };
-
- fenced_buf_type = g_type_register_static (GST_TYPE_BUFFER,
- "GstFencedBuffer", &fenced_buf_info, 0);
- }
- return fenced_buf_type;
-}
gboolean key;
gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
- key = ! !(GST_INDEX_ASSOC_FLAGS (entry) & GST_ASSOCIATION_FLAG_KEY_UNIT);
+ key = !!(GST_INDEX_ASSOC_FLAGS (entry) & GST_ASSOCIATION_FLAG_KEY_UNIT);
GST_LOG_OBJECT (demux, "position already mapped to time %" GST_TIME_FORMAT
", keyframe %d", GST_TIME_ARGS (time), key);
/* there is not really a way to delete the existing one */
- if (time != ts || key != ! !keyframe)
+ if (time != ts || key != !!keyframe)
GST_DEBUG_OBJECT (demux, "metadata mismatch");
#endif
return;
if (format != GST_FORMAT_TIME)
goto wrong_format;
- flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+ flush = !!(flags & GST_SEEK_FLAG_FLUSH);
/* FIXME : the keyframe flag is never used ! */
/* Work on a copy until we are sure the seek succeeded. */
demux->seeking = seeking;
GST_OBJECT_UNLOCK (demux);
- flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+ flush = !!(flags & GST_SEEK_FLAG_FLUSH);
/* FIXME : the keyframe flag is never used */
if (flush) {
&stop_type, &stop);
gst_event_unref (event);
- flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+ flush = !!(flags & GST_SEEK_FLAG_FLUSH);
if (format != GST_FORMAT_TIME && format != GST_FORMAT_DEFAULT) {
GST_ERROR_OBJECT (pad, "Seek in invalid format: %s",
/* sync sample atom */
stream->stps_present = FALSE;
if ((stream->stss_present =
- ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stss,
+ !!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stss,
&stream->stss) ? TRUE : FALSE) == TRUE) {
/* copy atom data into a new buffer for later use */
stream->stss.data = g_memdup (stream->stss.data, stream->stss.size);
/* partial sync sample atom */
if ((stream->stps_present =
- ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stps,
+ !!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stps,
&stream->stps) ? TRUE : FALSE) == TRUE) {
/* copy atom data into a new buffer for later use */
stream->stps.data = g_memdup (stream->stps.data, stream->stps.size);
/* composition time-to-sample */
if ((stream->ctts_present =
- ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_ctts,
+ !!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_ctts,
&stream->ctts) ? TRUE : FALSE) == TRUE) {
/* copy atom data into a new buffer for later use */
stream->ctts.data = g_memdup (stream->ctts.data, stream->ctts.size);
entry->pos + demux->common.ebml_segment_start);
}
- flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
- keyunit = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
+ flush = !!(flags & GST_SEEK_FLAG_FLUSH);
+ keyunit = !!(flags & GST_SEEK_FLAG_KEY_UNIT);
if (flush) {
GST_DEBUG_OBJECT (demux, "Starting flush");
libgstreplaygain_la_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstreplaygain_la_LIBADD = \
- $(GST_PLUGINS_BASE_LIBS) -lgstpbutils-0.10 \
+ $(GST_PLUGINS_BASE_LIBS) -lgstpbutils-$(GST_MAJORMINOR) \
$(GST_BASE_LIBS) $(GST_LIBS) $(LIBM)
libgstreplaygain_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstreplaygain_la_LIBTOOLFLAGS = --tag=disable-static
"encoding-name = (string) \"AC3\"")
);
-GST_BOILERPLATE (GstRtpAC3Depay, gst_rtp_ac3_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+G_DEFINE_TYPE (GstRtpAC3Depay, gst_rtp_ac3_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_ac3_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
GstBuffer * buf);
static void
-gst_rtp_ac3_depay_base_init (gpointer klass)
+gst_rtp_ac3_depay_class_init (GstRtpAC3DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_ac3_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_ac3_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP AC3 depayloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP AC3 depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts AC3 audio from RTP packets (RFC 4184)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_ac3_depay_class_init (GstRtpAC3DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->set_caps = gst_rtp_ac3_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_ac3_depay_process;
}
static void
-gst_rtp_ac3_depay_init (GstRtpAC3Depay * rtpac3depay,
- GstRtpAC3DepayClass * klass)
+gst_rtp_ac3_depay_init (GstRtpAC3Depay * rtpac3depay)
{
- /* needed because of GST_BOILERPLATE */
+ /* needed because of G_DEFINE_TYPE */
}
static gboolean
{
GstRtpAC3Depay *rtpac3depay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL, };
+ guint8 *payload;
+ guint16 FT, NF;
rtpac3depay = GST_RTP_AC3_DEPAY (depayload);
- {
- guint8 *payload;
- guint16 FT, NF;
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- if (gst_rtp_buffer_get_payload_len (buf) < 2)
- goto empty_packet;
+ if (gst_rtp_buffer_get_payload_len (&rtp) < 2)
+ goto empty_packet;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
- /* strip off header
- *
- * 0 1
- * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- * | MBZ | FT| NF |
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- FT = payload[0] & 0x3;
- NF = payload[1];
+ /* strip off header
+ *
+ * 0 1
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | MBZ | FT| NF |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ FT = payload[0] & 0x3;
+ NF = payload[1];
- GST_DEBUG_OBJECT (rtpac3depay, "FT: %d, NF: %d", FT, NF);
+ GST_DEBUG_OBJECT (rtpac3depay, "FT: %d, NF: %d", FT, NF);
- /* We don't bother with fragmented packets yet */
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 2, -1);
+ /* We don't bother with fragmented packets yet */
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 2, -1);
- if (outbuf)
- GST_DEBUG_OBJECT (rtpac3depay, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_rtp_buffer_unmap (&rtp);
- return outbuf;
- }
+  if (outbuf)
+    GST_DEBUG_OBJECT (rtpac3depay, "pushing buffer of size %" G_GSIZE_FORMAT,
+        gst_buffer_get_size (outbuf));
- return NULL;
+ return outbuf;
/* ERRORS */
empty_packet:
{
GST_ELEMENT_WARNING (rtpac3depay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstFlowReturn gst_rtp_ac3_pay_handle_buffer (GstBaseRTPPayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpAC3Pay, gst_rtp_ac3_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_ac3_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_ac3_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_ac3_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP AC3 audio payloader", "Codec/Payloader/Network/RTP",
- "Payload AC3 audio as RTP packets (RFC 4184)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_ac3_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAC3Pay, gst_rtp_ac3_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_ac3_pay_class_init (GstRtpAC3PayClass * klass)
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpac3pay_debug, "rtpac3pay", 0,
+      "AC3 Audio RTP Payloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstelement_class->change_state = gst_rtp_ac3_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ac3_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ac3_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP AC3 audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload AC3 audio as RTP packets (RFC 4184)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_ac3_pay_setcaps;
gstbasertppayload_class->handle_event = gst_rtp_ac3_pay_handle_event;
gstbasertppayload_class->handle_buffer = gst_rtp_ac3_pay_handle_buffer;
-
- GST_DEBUG_CATEGORY_INIT (rtpac3pay_debug, "rtpac3pay", 0,
- "AC3 Audio RTP Depayloader");
}
static void
-gst_rtp_ac3_pay_init (GstRtpAC3Pay * rtpac3pay, GstRtpAC3PayClass * klass)
+gst_rtp_ac3_pay_init (GstRtpAC3Pay * rtpac3pay)
{
rtpac3pay->adapter = gst_adapter_new ();
}
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL, };
/* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (2 + avail, 0, 0);
* 3: other fragment
* NF: amount of frames if FT = 0, else number of fragments.
*/
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
GST_LOG_OBJECT (rtpac3pay, "FT %u, NF %u", FT, NF);
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload[0] = (FT & 3);
payload[1] = NF;
payload_len -= 2;
avail -= payload_len;
if (avail == 0)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpac3pay->first_ts;
GST_BUFFER_DURATION (outbuf) = rtpac3pay->duration;
{
GstRtpAC3Pay *rtpac3pay;
GstFlowReturn ret;
- guint size, avail, left, NF;
+ gsize size, avail, left, NF;
guint8 *data, *p;
guint packet_len;
GstClockTime duration, timestamp;
rtpac3pay = GST_RTP_AC3_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
p += frame_size;
left -= frame_size;
}
+ gst_buffer_unmap (buffer, data, size);
if (NF == 0)
goto no_frames;
static GstBuffer *gst_rtp_amr_depay_process (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpAMRDepay, gst_rtp_amr_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_amr_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAMRDepay, gst_rtp_amr_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_amr_depay_base_init (gpointer klass)
+gst_rtp_amr_depay_class_init (GstRtpAMRDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_amr_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_amr_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP AMR depayloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP AMR depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts AMR or AMR-WB audio from RTP packets (RFC 3267)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_amr_depay_class_init (GstRtpAMRDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_amr_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_amr_depay_setcaps;
}
static void
-gst_rtp_amr_depay_init (GstRtpAMRDepay * rtpamrdepay,
- GstRtpAMRDepayClass * klass)
+gst_rtp_amr_depay_init (GstRtpAMRDepay * rtpamrdepay)
{
GstBaseRTPDepayload *depayload;
const gint *frame_size;
GstBuffer *outbuf = NULL;
gint payload_len;
+ GstRTPBuffer rtp = { NULL };
+ guint8 *odata;
+ gsize osize;
rtpamrdepay = GST_RTP_AMR_DEPAY (depayload);
else
frame_size = wb_frame_size;
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
/* when we get here, 1 channel, 8000/16000 Hz, octet aligned, no CRC,
* no robust sorting, no interleaving data is to be depayloaded */
{
gint amr_len;
gint ILL, ILP;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* need at least 2 bytes for the header */
if (payload_len < 2)
goto too_small;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* depay CMR. The CMR is used by the sender to request
* a new encoding mode.
outbuf = gst_buffer_new_and_alloc (payload_len);
/* point to destination */
- p = GST_BUFFER_DATA (outbuf);
+ odata = gst_buffer_map (outbuf, &osize, NULL, GST_MAP_WRITE);
+
/* point to first data packet */
+ p = odata;
dp = payload + num_packets;
if (rtpamrdepay->crc) {
/* skip CRC if present */
dp += fr_size;
}
}
+ gst_buffer_unmap (outbuf, odata, osize);
+
/* we can set the duration because each packet is 20 milliseconds */
GST_BUFFER_DURATION (outbuf) = num_packets * 20 * GST_MSECOND;
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
/* marker bit marks a discont buffer after a talkspurt. */
GST_DEBUG_OBJECT (depayload, "marker bit was set");
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
GST_DEBUG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_buffer_get_size (outbuf));
}
return outbuf;
static GstStateChangeReturn
gst_rtp_amr_pay_change_state (GstElement * element, GstStateChange transition);
-GST_BOILERPLATE (GstRtpAMRPay, gst_rtp_amr_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
+#define gst_rtp_amr_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAMRPay, gst_rtp_amr_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
-gst_rtp_amr_pay_base_init (gpointer klass)
+gst_rtp_amr_pay_class_init (GstRtpAMRPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gstelement_class->change_state = gst_rtp_amr_pay_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_amr_pay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_amr_pay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP AMR payloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP AMR payloader",
"Codec/Payloader/Network/RTP",
"Payload-encode AMR or AMR-WB audio into RTP packets (RFC 3267)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_amr_pay_class_init (GstRtpAMRPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
- GstElementClass *gstelement_class;
-
- gstelement_class = (GstElementClass *) klass;
- gstelement_class->change_state = gst_rtp_amr_pay_change_state;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_amr_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_amr_pay_handle_buffer;
}
static void
-gst_rtp_amr_pay_init (GstRtpAMRPay * rtpamrpay, GstRtpAMRPayClass * klass)
+gst_rtp_amr_pay_init (GstRtpAMRPay * rtpamrpay)
{
- /* needed because of GST_BOILERPLATE */
}
static void
GstRtpAMRPay *rtpamrpay;
const gint *frame_size;
GstFlowReturn ret;
- guint size, payload_len;
+ guint payload_len;
+ gsize size;
GstBuffer *outbuf;
- guint8 *payload, *data, *payload_amr;
+ guint8 *payload, *data, *ptr, *payload_amr;
GstClockTime timestamp, duration;
guint packet_len, mtu;
gint i, num_packets, num_nonempty_packets;
gint amr_len;
gboolean sid = FALSE;
+ GstRTPBuffer rtp = { NULL };
rtpamrpay = GST_RTP_AMR_PAY (basepayload);
mtu = GST_BASE_RTP_PAYLOAD_MTU (rtpamrpay);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
/* now alloc output buffer */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* copy timestamp */
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
if (GST_BUFFER_IS_DISCONT (buffer)) {
GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
gst_rtp_amr_pay_recalc_rtp_time (rtpamrpay, timestamp);
}
(num_packets * 160) << (rtpamrpay->mode == GST_RTP_AMR_P_MODE_WB);
/* get payload, this is now writable */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* 0 1 2 3 4 5 6 7
* +-+-+-+-+-+-+-+-+
/* copy data in payload, first we copy all the FTs then all
* the AMR data. The last FT has to have the F flag cleared. */
+ ptr = data;
for (i = 1; i <= num_packets; i++) {
guint8 FT;
gint fr_size;
* |F| FT |Q|P|P| more FT...
* +-+-+-+-+-+-+-+-+
*/
- FT = (*data & 0x78) >> 3;
+ FT = (*ptr & 0x78) >> 3;
fr_size = frame_size[FT];
if (i == num_packets)
/* last packet, clear F flag */
- payload[i] = *data & 0x7f;
+ payload[i] = *ptr & 0x7f;
else
/* set F flag */
- payload[i] = *data | 0x80;
+ payload[i] = *ptr | 0x80;
- memcpy (payload_amr, &data[1], fr_size);
+ memcpy (payload_amr, &ptr[1], fr_size);
/* all sizes are > 0 since we checked for that above */
- data += fr_size + 1;
+ ptr += fr_size + 1;
payload_amr += fr_size;
}
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
+ gst_rtp_buffer_unmap (&rtp);
+
ret = gst_basertppayload_push (basepayload, outbuf);
return ret;
{
GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
(NULL), ("received AMR frame with size <= 0"));
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
{
GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
(NULL), ("received incomplete AMR frames"));
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
{
GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
(NULL), ("received too many AMR frames for MTU"));
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
static gboolean gst_rtp_bv_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPBVDepay, gst_rtp_bv_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_bv_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPBVDepay, gst_rtp_bv_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_bv_depay_base_init (gpointer klass)
+gst_rtp_bv_depay_class_init (GstRTPBVDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_bv_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_bv_depay_sink_template));
- gst_element_class_set_details_simple (element_class,
+
+ gst_element_class_set_details_simple (gstelement_class,
"RTP BroadcomVoice depayloader", "Codec/Depayloader/Network/RTP",
"Extracts BroadcomVoice audio from RTP packets (RFC 4298)",
"Wim Taymans <wim.taymans@collabora.co.uk>");
-}
-
-static void
-gst_rtp_bv_depay_class_init (GstRTPBVDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_bv_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_bv_depay_setcaps;
}
static void
-gst_rtp_bv_depay_init (GstRTPBVDepay * rtpbvdepay, GstRTPBVDepayClass * klass)
+gst_rtp_bv_depay_init (GstRTPBVDepay * rtpbvdepay)
{
rtpbvdepay->mode = -1;
}
{
GstBuffer *outbuf;
gboolean marker;
+ GstRTPBuffer rtp = { NULL, };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ (guint) gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark start of talkspurt with DISCONT */
static gboolean gst_rtp_bv_pay_sink_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPBVPay, gst_rtp_bv_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
+#define gst_rtp_bv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPBVPay, gst_rtp_bv_pay, GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_bv_pay_base_init (gpointer klass)
+gst_rtp_bv_pay_class_init (GstRTPBVPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpbvpay_debug, "rtpbvpay", 0,
+ "BroadcomVoice audio RTP payloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_bv_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_bv_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP BV Payloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP BV Payloader",
"Codec/Payloader/Network/RTP",
"Packetize BroadcomVoice audio streams into RTP packets (RFC 4298)",
"Wim Taymans <wim.taymans@collabora.co.uk>");
-}
-
-static void
-gst_rtp_bv_pay_class_init (GstRTPBVPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_bv_pay_sink_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_bv_pay_sink_getcaps;
-
- GST_DEBUG_CATEGORY_INIT (rtpbvpay_debug, "rtpbvpay", 0,
- "BroadcomVoice audio RTP payloader");
}
static void
-gst_rtp_bv_pay_init (GstRTPBVPay * rtpbvpay, GstRTPBVPayClass * klass)
+gst_rtp_bv_pay_init (GstRTPBVPay * rtpbvpay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
static gboolean gst_rtp_celt_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpCELTDepay, gst_rtp_celt_depay, GstBaseRTPDepayload,
+#define gst_rtp_celt_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpCELTDepay, gst_rtp_celt_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_celt_depay_base_init (gpointer klass)
+gst_rtp_celt_depay_class_init (GstRtpCELTDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_celt_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_celt_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP CELT depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts CELT audio from RTP packets",
- "Wim Taymans <wim.taymans@gmail.com>");
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
GST_DEBUG_CATEGORY_INIT (rtpceltdepay_debug, "rtpceltdepay", 0,
"CELT RTP Depayloader");
-}
-
-static void
-gst_rtp_celt_depay_class_init (GstRtpCELTDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP CELT depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts CELT audio from RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertpdepayload_class->process = gst_rtp_celt_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_celt_depay_setcaps;
}
static void
-gst_rtp_celt_depay_init (GstRtpCELTDepay * rtpceltdepay,
- GstRtpCELTDepayClass * klass)
+gst_rtp_celt_depay_init (GstRtpCELTDepay * rtpceltdepay)
{
}
GstRtpCELTDepay *rtpceltdepay;
gint clock_rate, nb_channels = 0, frame_size = 0;
GstBuffer *buf;
- guint8 *data;
+ guint8 *data, *ptr;
+ gsize size;
const gchar *params;
GstCaps *srccaps;
gboolean res;
/* construct minimal header and comment packet for the decoder */
buf = gst_buffer_new_and_alloc (60);
- data = GST_BUFFER_DATA (buf);
- memcpy (data, "CELT ", 8);
- data += 8;
- memcpy (data, "1.1.12", 7);
- data += 20;
- GST_WRITE_UINT32_LE (data, 0x80000006); /* version */
- data += 4;
- GST_WRITE_UINT32_LE (data, 56); /* header_size */
- data += 4;
- GST_WRITE_UINT32_LE (data, clock_rate); /* rate */
- data += 4;
- GST_WRITE_UINT32_LE (data, nb_channels); /* channels */
- data += 4;
- GST_WRITE_UINT32_LE (data, frame_size); /* frame-size */
- data += 4;
- GST_WRITE_UINT32_LE (data, -1); /* overlap */
- data += 4;
- GST_WRITE_UINT32_LE (data, -1); /* bytes_per_packet */
- data += 4;
- GST_WRITE_UINT32_LE (data, 0); /* extra headers */
+ ptr = data = gst_buffer_map (buf, &size, NULL, GST_MAP_WRITE);
+ memcpy (ptr, "CELT ", 8);
+ ptr += 8;
+ memcpy (ptr, "1.1.12", 7);
+ ptr += 20;
+ GST_WRITE_UINT32_LE (ptr, 0x80000006); /* version */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, 56); /* header_size */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, clock_rate); /* rate */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, nb_channels); /* channels */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, frame_size); /* frame-size */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, -1); /* overlap */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, -1); /* bytes_per_packet */
+ ptr += 4;
+ GST_WRITE_UINT32_LE (ptr, 0); /* extra headers */
+ gst_buffer_unmap (buf, data, size);
srccaps = gst_caps_new_simple ("audio/x-celt", NULL);
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpceltdepay), buf);
buf = gst_buffer_new_and_alloc (sizeof (gst_rtp_celt_comment));
- memcpy (GST_BUFFER_DATA (buf), gst_rtp_celt_comment,
- sizeof (gst_rtp_celt_comment));
+ gst_buffer_fill (buf, 0, gst_rtp_celt_comment, sizeof (gst_rtp_celt_comment));
gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpceltdepay), buf);
GstClockTime framesize_ns = 0, timestamp;
guint n = 0;
GstRtpCELTDepay *rtpceltdepay;
+ GstRTPBuffer rtp = { NULL, };
rtpceltdepay = GST_RTP_CELT_DEPAY (depayload);
clock_rate = depayload->clock_rate;
timestamp = GST_BUFFER_TIMESTAMP (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
GST_LOG_OBJECT (depayload, "got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf),
- gst_rtp_buffer_get_marker (buf),
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ (guint) gst_buffer_get_size (buf),
+ gst_rtp_buffer_get_marker (&rtp),
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
GST_LOG_OBJECT (depayload, "got clock-rate=%d, frame_size=%d, "
"_ns=%" GST_TIME_FORMAT ", timestamp=%" GST_TIME_FORMAT, clock_rate,
frame_size, GST_TIME_ARGS (framesize_ns), GST_TIME_ARGS (timestamp));
- payload = gst_rtp_buffer_get_payload (buf);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* first count how many bytes are consumed by the size headers and make offset
* point to the first data byte */
total_size += size + 1;
} while (s == 0xff);
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, offset, size);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, offset, size);
offset += size;
if (frame_size != -1 && clock_rate != -1) {
gst_base_rtp_depayload_push (depayload, outbuf);
}
+ gst_rtp_buffer_unmap (&rtp);
+
return NULL;
}
static GstFlowReturn gst_rtp_celt_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpCELTPay, gst_rtp_celt_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_celt_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_celt_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_celt_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP CELT payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes CELT audio into a RTP packet",
- "Wim Taymans <wim.taymans@gmail.com>");
-
- GST_DEBUG_CATEGORY_INIT (rtpceltpay_debug, "rtpceltpay", 0,
- "CELT RTP Payloader");
-}
+#define gst_rtp_celt_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpCELTPay, gst_rtp_celt_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_celt_pay_class_init (GstRtpCELTPayClass * klass)
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpceltpay_debug, "rtpceltpay", 0,
+ "CELT RTP Payloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstelement_class->change_state = gst_rtp_celt_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_celt_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP CELT payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes CELT audio into a RTP packet",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_celt_pay_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_celt_pay_getcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_celt_pay_handle_buffer;
}
static void
-gst_rtp_celt_pay_init (GstRtpCELTPay * rtpceltpay, GstRtpCELTPayClass * klass)
+gst_rtp_celt_pay_init (GstRtpCELTPay * rtpceltpay)
{
rtpceltpay->queue = g_queue_new ();
}
guint8 *payload, *spayload;
guint payload_len;
GstClockTime duration;
+ GstRTPBuffer rtp = { NULL, };
payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
duration = rtpceltpay->qduration;
GST_BUFFER_DURATION (outbuf) = duration;
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* point to the payload for size headers and data */
- spayload = gst_rtp_buffer_get_payload (outbuf);
+ spayload = gst_rtp_buffer_get_payload (&rtp);
payload = spayload + rtpceltpay->sbytes;
while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
/* write the size to the header */
- size = GST_BUFFER_SIZE (buf);
+ size = gst_buffer_get_size (buf);
while (size > 0xff) {
*spayload++ = 0xff;
size -= 0xff;
}
*spayload++ = size;
- size = GST_BUFFER_SIZE (buf);
/* copy payload */
- memcpy (payload, GST_BUFFER_DATA (buf), size);
+ size = gst_buffer_get_size (buf);
+ gst_buffer_extract (buf, 0, payload, size);
payload += size;
gst_buffer_unref (buf);
}
+ gst_rtp_buffer_unmap (&rtp);
/* we consumed it all */
rtpceltpay->bytes = 0;
{
GstFlowReturn ret;
GstRtpCELTPay *rtpceltpay;
- guint size, payload_len;
+ gsize size, payload_len;
guint8 *data;
GstClockTime duration, packet_dur;
guint i, ssize, packet_len;
ret = GST_FLOW_OK;
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
switch (rtpceltpay->packet) {
case 0:
if (!gst_rtp_celt_pay_parse_ident (rtpceltpay, data, size))
goto parse_error;
- goto done;
+ goto cleanup;
case 1:
/* comment packet, we ignore it */
- goto done;
+ goto cleanup;
default:
/* other packets go in the payload */
break;
}
+ gst_buffer_unmap (buffer, data, size);
duration = GST_BUFFER_DURATION (buffer);
return ret;
/* ERRORS */
+cleanup:
+ {
+ gst_buffer_unmap (buffer, data, size);
+ goto done;
+ }
parse_error:
{
GST_ELEMENT_ERROR (rtpceltpay, STREAM, DECODE, (NULL),
("Error parsing first identification packet."));
+ gst_buffer_unmap (buffer, data, size);
return GST_FLOW_ERROR;
}
}
static GstFlowReturn gst_rtp_depay_chain_rtcp (GstPad * pad,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRTPDepay, gst_rtp_depay, GstElement, GST_TYPE_ELEMENT);
+G_DEFINE_TYPE (GstRTPDepay, gst_rtp_depay, GST_TYPE_ELEMENT);
static void
-gst_rtp_depay_base_init (gpointer klass)
+gst_rtp_depay_class_init (GstRTPDepayClass * klass)
{
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (rtpdepay_debug, "rtpdepay", 0, "RTP decoder");
+
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_depay_src_rtp_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_depay_sink_rtp_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_depay_sink_rtcp_template));
+
gst_element_class_set_details_simple (gstelement_class,
"Dummy RTP session manager", "Codec/Depayloader/Network/RTP",
"Accepts raw RTP and RTCP packets and sends them forward",
}
static void
-gst_rtp_depay_class_init (GstRTPDepayClass * klass)
-{
- GST_DEBUG_CATEGORY_INIT (rtpdepay_debug, "rtpdepay", 0, "RTP decoder");
-}
-
-static void
-gst_rtp_depay_init (GstRTPDepay * rtpdepay, GstRTPDepayClass * klass)
+gst_rtp_depay_init (GstRTPDepay * rtpdepay)
{
/* the input rtp pad */
rtpdepay->sink_rtp =
static gboolean gst_rtp_dv_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPDVDepay, gst_rtp_dv_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD)
+#define gst_rtp_dv_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDVDepay, gst_rtp_dv_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
- static void gst_rtp_dv_depay_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_factory));
-
- gst_element_class_set_details_simple (element_class, "RTP DV Depayloader",
- "Codec/Depayloader/Network/RTP",
- "Depayloads DV from RTP packets (RFC 3189)",
- "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
-}
-/* initialize the plugin's class */
static void
gst_rtp_dv_depay_class_init (GstRTPDVDepayClass * klass)
{
GstBaseRTPDepayloadClass *gstbasertpdepayload_class =
(GstBaseRTPDepayloadClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (rtpdvdepay_debug, "rtpdvdepay", 0,
+ "DV RTP Depayloader");
+
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_change_state);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP DV Depayloader",
+ "Codec/Depayloader/Network/RTP",
+ "Depayloads DV from RTP packets (RFC 3189)",
+ "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertpdepayload_class->process =
GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_process);
gstbasertpdepayload_class->set_caps =
GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_setcaps);
-
- GST_DEBUG_CATEGORY_INIT (rtpdvdepay_debug, "rtpdvdepay", 0,
- "DV RTP Depayloader");
}
/* initialize the new element
* initialize structure
*/
static void
-gst_rtp_dv_depay_init (GstRTPDVDepay * filter, GstRTPDVDepayClass * klass)
+gst_rtp_dv_depay_init (GstRTPDVDepay * filter)
{
}
gint clock_rate;
gboolean systemstream, ret;
const gchar *encode, *media;
+ guint8 *data;
+ gsize size;
rtpdvdepay = GST_RTP_DV_DEPAY (depayload);
/* Initialize the new accumulator frame.
* If the previous frame exists, copy that into the accumulator frame.
* This way, missing packets in the stream won't show up badly. */
- memset (GST_BUFFER_DATA (rtpdvdepay->acc), 0, rtpdvdepay->frame_size);
+ data = gst_buffer_map (rtpdvdepay->acc, &size, NULL, GST_MAP_WRITE);
+ memset (data, 0, rtpdvdepay->frame_size);
+ gst_buffer_unmap (rtpdvdepay->acc, data, size);
srccaps = gst_caps_new_simple ("video/x-dv",
"systemstream", G_TYPE_BOOLEAN, systemstream,
guint payload_len, location;
GstRTPDVDepay *dvdepay = GST_RTP_DV_DEPAY (base);
gboolean marker;
+ GstRTPBuffer rtp = { NULL, };
+
+ gst_rtp_buffer_map (in, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (in);
+ marker = gst_rtp_buffer_get_marker (&rtp);
/* Check if the received packet contains (the start of) a new frame, we do
* this by checking the RTP timestamp. */
- rtp_ts = gst_rtp_buffer_get_timestamp (in);
+ rtp_ts = gst_rtp_buffer_get_timestamp (&rtp);
/* we cannot copy the packet yet if the marker is set, we will do that below
* after taking out the data */
}
/* Extract the payload */
- payload_len = gst_rtp_buffer_get_payload_len (in);
- payload = gst_rtp_buffer_get_payload (in);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* copy all DIF chunks in their place. */
while (payload_len >= 80) {
/* And copy it in, provided the location is sane. */
if (offset >= 0 && offset <= dvdepay->frame_size - 80)
- memcpy (GST_BUFFER_DATA (dvdepay->acc) + offset, payload, 80);
+ gst_buffer_fill (dvdepay->acc, offset, payload, 80);
payload += 80;
payload_len -= 80;
}
+ gst_rtp_buffer_unmap (&rtp);
if (marker) {
GST_DEBUG_OBJECT (dvdepay, "marker bit complete frame %u", rtp_ts);
static void gst_dv_pay_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
-GST_BOILERPLATE (GstRTPDVPay, gst_rtp_dv_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_dv_pay_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dv_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dv_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP DV Payloader",
- "Codec/Payloader/Network/RTP",
- "Payloads DV into RTP packets (RFC 3189)",
- "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_dv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDVPay, gst_rtp_dv_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_dv_pay_class_init (GstRTPDVPayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpdvpay_debug, "rtpdvpay", 0, "DV RTP Payloader");
+
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->set_property = gst_dv_pay_set_property;
gobject_class->get_property = gst_dv_pay_get_property;
- gstbasertppayload_class->set_caps = gst_rtp_dv_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_dv_pay_handle_buffer;
-
g_object_class_install_property (gobject_class, PROP_MODE,
g_param_spec_enum ("mode", "Mode",
"The payload mode of payloading",
GST_TYPE_DV_PAY_MODE, DEFAULT_MODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- GST_DEBUG_CATEGORY_INIT (rtpdvpay_debug, "rtpdvpay", 0, "DV RTP Payloader");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dv_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dv_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP DV Payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payloads DV into RTP packets (RFC 3189)",
+ "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
+
+ gstbasertppayload_class->set_caps = gst_rtp_dv_pay_setcaps;
+ gstbasertppayload_class->handle_buffer = gst_rtp_dv_pay_handle_buffer;
}
static void
-gst_rtp_dv_pay_init (GstRTPDVPay * rtpdvpay, GstRTPDVPayClass * klass)
+gst_rtp_dv_pay_init (GstRTPDVPay * rtpdvpay)
{
}
}
static gboolean
-gst_dv_pay_negotiate (GstRTPDVPay * rtpdvpay, guint8 * data, guint size)
+gst_dv_pay_negotiate (GstRTPDVPay * rtpdvpay, guint8 * data, gsize size)
{
const gchar *encode, *media;
gboolean audio_bundled, res;
GstBuffer *outbuf;
GstFlowReturn ret = GST_FLOW_OK;
gint hdrlen;
- guint size;
- guint8 *data;
+ gsize size, osize;
+ guint8 *data, *odata;
guint8 *dest;
guint filled;
+ GstRTPBuffer rtp = { NULL, };
rtpdvpay = GST_RTP_DV_PAY (basepayload);
max_payload_size = ((GST_BASE_RTP_PAYLOAD_MTU (rtpdvpay) - hdrlen) / 80) * 80;
/* The length of the buffer to transmit. */
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
+ odata = data;
+ osize = size;
GST_DEBUG_OBJECT (rtpdvpay,
"DV RTP payloader got buffer of %u bytes, splitting in %u byte "
if (outbuf == NULL) {
outbuf = gst_rtp_buffer_new_allocate (max_payload_size, 0, 0);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
- dest = gst_rtp_buffer_get_payload (outbuf);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ dest = gst_rtp_buffer_get_payload (&rtp);
filled = 0;
}
guint hlen;
/* set marker */
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
/* shrink buffer to last packet */
- hlen = gst_rtp_buffer_get_header_len (outbuf);
- gst_rtp_buffer_set_packet_len (outbuf, hlen + filled);
+ hlen = gst_rtp_buffer_get_header_len (&rtp);
+ gst_rtp_buffer_set_packet_len (&rtp, hlen + filled);
}
+
/* Push out the created piece, and check for errors. */
+ gst_rtp_buffer_unmap (&rtp);
ret = gst_basertppayload_push (basepayload, outbuf);
if (ret != GST_FLOW_OK)
break;
outbuf = NULL;
}
}
+ gst_buffer_unmap (buffer, odata, osize);
gst_buffer_unref (buffer);
return ret;
)
);
-GST_BOILERPLATE (GstRtpG722Depay, gst_rtp_g722_depay, GstBaseRTPDepayload,
+#define gst_rtp_g722_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG722Depay, gst_rtp_g722_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_g722_depay_setcaps (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_g722_depay_base_init (gpointer klass)
+gst_rtp_g722_depay_class_init (GstRtpG722DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg722depay_debug, "rtpg722depay", 0,
+ "G722 RTP Depayloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g722_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g722_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP audio depayloader",
- "Codec/Depayloader/Network/RTP",
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP audio depayloader", "Codec/Depayloader/Network/RTP",
"Extracts G722 audio from RTP packets",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_g722_depay_class_init (GstRtpG722DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->set_caps = gst_rtp_g722_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_g722_depay_process;
-
- GST_DEBUG_CATEGORY_INIT (rtpg722depay_debug, "rtpg722depay", 0,
- "G722 RTP Depayloader");
}
static void
-gst_rtp_g722_depay_init (GstRtpG722Depay * rtpg722depay,
- GstRtpG722DepayClass * klass)
+gst_rtp_g722_depay_init (GstRtpG722Depay * rtpg722depay)
{
- /* needed because of GST_BOILERPLATE */
}
static gint
GstBuffer *outbuf;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL, };
rtpg722depay = GST_RTP_G722_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 0)
goto empty_packet;
GST_DEBUG_OBJECT (rtpg722depay, "got payload of %d bytes", payload_len);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark talk spurt with DISCONT */
{
GST_ELEMENT_WARNING (rtpg722depay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstCaps *gst_rtp_g722_pay_getcaps (GstBaseRTPPayload * rtppayload,
GstPad * pad);
-GST_BOILERPLATE (GstRtpG722Pay, gst_rtp_g722_pay, GstBaseRTPAudioPayload,
+#define gst_rtp_g722_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG722Pay, gst_rtp_g722_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_g722_pay_base_init (gpointer klass)
+gst_rtp_g722_pay_class_init (GstRtpG722PayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg722pay_debug, "rtpg722pay", 0,
+ "G722 RTP Payloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g722_pay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g722_pay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP audio payloader",
+ gst_element_class_set_details_simple (gstelement_class, "RTP audio payloader",
"Codec/Payloader/Network/RTP",
"Payload-encode Raw audio into RTP packets (RFC 3551)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_g722_pay_class_init (GstRtpG722PayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_g722_pay_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_g722_pay_getcaps;
-
- GST_DEBUG_CATEGORY_INIT (rtpg722pay_debug, "rtpg722pay", 0,
- "G722 RTP Payloader");
}
static void
-gst_rtp_g722_pay_init (GstRtpG722Pay * rtpg722pay, GstRtpG722PayClass * klass)
+gst_rtp_g722_pay_init (GstRtpG722Pay * rtpg722pay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
static GstBuffer *gst_rtp_g723_depay_process (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpG723Depay, gst_rtp_g723_depay, GstBaseRTPDepayload,
+#define gst_rtp_g723_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG723Depay, gst_rtp_g723_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_g723_depay_base_init (gpointer klass)
+gst_rtp_g723_depay_class_init (GstRtpG723DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg723depay_debug, "rtpg723depay", 0,
+ "G.723 RTP Depayloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g723_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g723_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP G.723 depayloader",
- "Codec/Depayloader/Network/RTP",
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP G.723 depayloader", "Codec/Depayloader/Network/RTP",
"Extracts G.723 audio from RTP packets (RFC 3551)",
"Wim Taymans <wim.taymans@gmail.com>");
- GST_DEBUG_CATEGORY_INIT (rtpg723depay_debug, "rtpg723depay", 0,
- "G.723 RTP Depayloader");
-}
-
-static void
-gst_rtp_g723_depay_class_init (GstRtpG723DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
gstbasertpdepayload_class->process = gst_rtp_g723_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_g723_depay_setcaps;
}
static void
-gst_rtp_g723_depay_init (GstRtpG723Depay * rtpg723depay,
- GstRtpG723DepayClass * klass)
+gst_rtp_g723_depay_init (GstRtpG723Depay * rtpg723depay)
{
GstBaseRTPDepayload *depayload;
GstBuffer *outbuf = NULL;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL, };
rtpg723depay = GST_RTP_G723_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* At least 4 bytes */
if (payload_len < 4)
GST_LOG_OBJECT (rtpg723depay, "payload len %d", payload_len);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (marker) {
/* marker bit starts talkspurt */
}
GST_LOG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ (guint) gst_buffer_get_size (outbuf));
return outbuf;
bad_packet:
{
/* no fatal error */
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstStateChangeReturn gst_rtp_g723_pay_change_state (GstElement * element,
GstStateChange transition);
-GST_BOILERPLATE (GstRTPG723Pay, gst_rtp_g723_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_g723_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g723_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g723_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP G.723 payloader",
- "Codec/Payloader/Network/RTP",
- "Packetize G.723 audio into RTP packets",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_g723_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPG723Pay, gst_rtp_g723_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_g723_pay_class_init (GstRTPG723PayClass * klass)
gstelement_class->change_state = gst_rtp_g723_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g723_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g723_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP G.723 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Packetize G.723 audio into RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
payload_class->set_caps = gst_rtp_g723_pay_set_caps;
payload_class->handle_buffer = gst_rtp_g723_pay_handle_buffer;
}
static void
-gst_rtp_g723_pay_init (GstRTPG723Pay * pay, GstRTPG723PayClass * klass)
+gst_rtp_g723_pay_init (GstRTPG723Pay * pay)
{
GstBaseRTPPayload *payload = GST_BASE_RTP_PAYLOAD (pay);
GstFlowReturn ret;
guint8 *payload;
guint avail;
+ GstRTPBuffer rtp = { NULL };
avail = gst_adapter_available (pay->adapter);
outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);
- payload = gst_rtp_buffer_get_payload (outbuf);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = pay->timestamp;
GST_BUFFER_DURATION (outbuf) = pay->duration;
/* set discont and marker */
if (pay->discont) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
pay->discont = FALSE;
}
+ gst_rtp_buffer_unmap (&rtp);
ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (pay), outbuf);
{
GstFlowReturn ret = GST_FLOW_OK;
guint8 *data;
- guint size;
+ gsize size;
guint8 HDR;
GstRTPG723Pay *pay;
GstClockTime packet_dur, timestamp;
pay = GST_RTP_G723_PAY (payload);
- size = GST_BUFFER_SIZE (buf);
- data = GST_BUFFER_DATA (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buf);
if (GST_BUFFER_IS_DISCONT (buf)) {
else
pay->timestamp = 0;
}
+ gst_buffer_unmap (buf, data, size);
/* add packet to the queue */
gst_adapter_push (pay->adapter, buf);
GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE,
("Invalid input buffer size"),
("Input size should be 4, 20 or 24, got %u", size));
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return GST_FLOW_OK;
}
GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE,
("Wrong input buffer size"),
("Expected input buffer size %u but got %u", size_tab[HDR], size));
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return GST_FLOW_OK;
}
static gboolean gst_rtp_g726_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpG726Depay, gst_rtp_g726_depay, GstBaseRTPDepayload,
+#define gst_rtp_g726_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG726Depay, gst_rtp_g726_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_g726_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g726_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g726_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP G.726 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts G.726 audio from RTP packets",
- "Axis Communications <dev-gstreamer@axis.com>");
-}
-
-static void
gst_rtp_g726_depay_class_init (GstRtpG726DepayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpg726depay_debug, "rtpg726depay", 0,
+ "G.726 RTP Depayloader");
+
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gobject_class->set_property = gst_rtp_g726_depay_set_property;
"Force AAL2 decoding for compatibility with bad payloaders",
DEFAULT_FORCE_AAL2, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP G.726 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts G.726 audio from RTP packets",
+ "Axis Communications <dev-gstreamer@axis.com>");
+
gstbasertpdepayload_class->process = gst_rtp_g726_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_g726_depay_setcaps;
-
- GST_DEBUG_CATEGORY_INIT (rtpg726depay_debug, "rtpg726depay", 0,
- "G.726 RTP Depayloader");
}
static void
-gst_rtp_g726_depay_init (GstRtpG726Depay * rtpG726depay,
- GstRtpG726DepayClass * klass)
+gst_rtp_g726_depay_init (GstRtpG726Depay * rtpG726depay)
{
GstBaseRTPDepayload *depayload;
GstRtpG726Depay *depay;
GstBuffer *outbuf = NULL;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
depay = GST_RTP_G726_DEPAY (depayload);
- marker = gst_rtp_buffer_get_marker (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtp);
+
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
if (depay->aal2 || depay->force_aal2) {
/* AAL2, we can just copy the bytes */
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
if (!outbuf)
goto bad_len;
} else {
- guint8 *in, *out, tmp;
+ guint8 *in, *out, tmp, *odata;
guint len;
+ gsize osize;
- in = gst_rtp_buffer_get_payload (buf);
- len = gst_rtp_buffer_get_payload_len (buf);
-
- if (gst_buffer_is_writable (buf)) {
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- } else {
- GstBuffer *copy;
-
- /* copy buffer */
- copy = gst_buffer_copy (buf);
- outbuf = gst_rtp_buffer_get_payload_buffer (copy);
- gst_buffer_unref (copy);
- }
+ in = gst_rtp_buffer_get_payload (&rtp);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
if (!outbuf)
goto bad_len;
+ outbuf = gst_buffer_make_writable (outbuf);
- out = GST_BUFFER_DATA (outbuf);
+ odata = gst_buffer_map (outbuf, &osize, NULL, GST_MAP_WRITE);
+ out = odata;
/* we need to reshuffle the bytes, input is always of the form
* A B C D ... with the number of bits depending on the bitrate. */
break;
}
}
+ gst_buffer_unmap (outbuf, odata, osize);
}
if (marker) {
static GstFlowReturn gst_rtp_g726_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpG726Pay, gst_rtp_g726_pay, GstBaseRTPAudioPayload,
+#define gst_rtp_g726_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG726Pay, gst_rtp_g726_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_g726_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g726_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g726_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP G.726 payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes G.726 audio into a RTP packet",
- "Axis Communications <dev-gstreamer@axis.com>");
-}
-
-static void
gst_rtp_g726_pay_class_init (GstRtpG726PayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->set_property = gst_rtp_g726_pay_set_property;
"Force AAL2 encoding for compatibility with bad depayloaders",
DEFAULT_FORCE_AAL2, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g726_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP G.726 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes G.726 audio into a RTP packet",
+ "Axis Communications <dev-gstreamer@axis.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_g726_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_g726_pay_handle_buffer;
}
static void
-gst_rtp_g726_pay_init (GstRtpG726Pay * rtpg726pay, GstRtpG726PayClass * klass)
+gst_rtp_g726_pay_init (GstRtpG726Pay * rtpg726pay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
if (!pay->aal2) {
guint8 *data, tmp;
- guint len;
+ gsize len;
/* for non AAL2, we need to reshuffle the bytes, we can do this in-place
* when the buffer is writable. */
buffer = gst_buffer_make_writable (buffer);
- data = GST_BUFFER_DATA (buffer);
- len = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &len, NULL, GST_MAP_READWRITE);
GST_LOG_OBJECT (pay, "packing %u bytes of data", len);
break;
}
}
+ gst_buffer_unmap (buffer, data, len);
}
res =
static GstBuffer *gst_rtp_g729_depay_process (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpG729Depay, gst_rtp_g729_depay, GstBaseRTPDepayload,
+#define gst_rtp_g729_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG729Depay, gst_rtp_g729_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_g729_depay_base_init (gpointer klass)
+gst_rtp_g729_depay_class_init (GstRtpG729DepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg729depay_debug, "rtpg729depay", 0,
+ "G.729 RTP Depayloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g729_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_g729_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP G.729 depayloader",
- "Codec/Depayloader/Network/RTP",
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP G.729 depayloader", "Codec/Depayloader/Network/RTP",
"Extracts G.729 audio from RTP packets (RFC 3551)",
"Laurent Glayal <spglegle@yahoo.fr>");
- GST_DEBUG_CATEGORY_INIT (rtpg729depay_debug, "rtpg729depay", 0,
- "G.729 RTP Depayloader");
-}
-
-static void
-gst_rtp_g729_depay_class_init (GstRtpG729DepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
-
gstbasertpdepayload_class->process = gst_rtp_g729_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_g729_depay_setcaps;
}
static void
-gst_rtp_g729_depay_init (GstRtpG729Depay * rtpg729depay,
- GstRtpG729DepayClass * klass)
+gst_rtp_g729_depay_init (GstRtpG729Depay * rtpg729depay)
{
GstBaseRTPDepayload *depayload;
}
}
-
static GstBuffer *
gst_rtp_g729_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf = NULL;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
rtpg729depay = GST_RTP_G729_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
/* At least 2 bytes (CNG from G729 Annex B) */
if (payload_len < 2) {
GST_LOG_OBJECT (rtpg729depay, "G729 payload contains CNG frame");
}
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+
+ gst_rtp_buffer_unmap (&rtp);
if (marker) {
/* marker bit starts talkspurt */
}
GST_LOG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_buffer_get_size (outbuf));
return outbuf;
"clock-rate = (int) 8000, " "encoding-name = (string) \"G729\"")
);
-GST_BOILERPLATE (GstRTPG729Pay, gst_rtp_g729_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_g729_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g729_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_g729_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP G.729 payloader",
- "Codec/Payloader/Network/RTP",
- "Packetize G.729 audio into RTP packets",
- "Olivier Crete <olivier.crete@collabora.co.uk>");
-
- GST_DEBUG_CATEGORY_INIT (rtpg729pay_debug, "rtpg729pay", 0,
- "G.729 RTP Payloader");
-}
+#define gst_rtp_g729_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPG729Pay, gst_rtp_g729_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_g729_pay_finalize (GObject * object)
GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseRTPPayloadClass *payload_class = GST_BASE_RTP_PAYLOAD_CLASS (klass);
+ GST_DEBUG_CATEGORY_INIT (rtpg729pay_debug, "rtpg729pay", 0,
+ "G.729 RTP Payloader");
+
gobject_class->finalize = gst_rtp_g729_pay_finalize;
gstelement_class->change_state = gst_rtp_g729_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g729_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_g729_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP G.729 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Packetize G.729 audio into RTP packets",
+ "Olivier Crete <olivier.crete@collabora.co.uk>");
+
payload_class->set_caps = gst_rtp_g729_pay_set_caps;
payload_class->handle_buffer = gst_rtp_g729_pay_handle_buffer;
}
static void
-gst_rtp_g729_pay_init (GstRTPG729Pay * pay, GstRTPG729PayClass * klass)
+gst_rtp_g729_pay_init (GstRTPG729Pay * pay)
{
GstBaseRTPPayload *payload = GST_BASE_RTP_PAYLOAD (pay);
GstBuffer *outbuf;
guint8 *payload;
GstFlowReturn ret;
+ GstRTPBuffer rtp = { NULL };
basepayload = GST_BASE_RTP_PAYLOAD (rtpg729pay);
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_READWRITE, &rtp);
+
/* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
memcpy (payload, data, payload_len);
/* set metadata */
if (G_UNLIKELY (rtpg729pay->discont)) {
GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
rtpg729pay->discont = FALSE;
}
+ gst_rtp_buffer_unmap (&rtp);
ret = gst_basertppayload_push (basepayload, outbuf);
guint minptime_octets = 0;
guint min_payload_len;
guint max_payload_len;
+ gsize size;
+ GstClockTime timestamp;
- available = GST_BUFFER_SIZE (buf);
+ size = gst_buffer_get_size (buf);
- if (available % G729_FRAME_SIZE != 0 &&
- available % G729_FRAME_SIZE != G729B_CN_FRAME_SIZE)
+ if (size % G729_FRAME_SIZE != 0 &&
+ size % G729_FRAME_SIZE != G729B_CN_FRAME_SIZE)
goto invalid_size;
/* max number of bytes based on given ptime, has to be multiple of
adapter = rtpg729pay->adapter;
available = gst_adapter_available (adapter);
+ timestamp = GST_BUFFER_TIMESTAMP (buf);
+
/* resync rtp time on discont or a discontinuous cn packet */
if (GST_BUFFER_IS_DISCONT (buf)) {
/* flush remainder */
available = 0;
}
rtpg729pay->discont = TRUE;
- gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, GST_BUFFER_TIMESTAMP (buf));
+ gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, timestamp);
}
- if (GST_BUFFER_SIZE (buf) < G729_FRAME_SIZE)
- gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, GST_BUFFER_TIMESTAMP (buf));
+ if (size < G729_FRAME_SIZE)
+ gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, timestamp);
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (rtpg729pay->first_ts))) {
- rtpg729pay->first_ts = GST_BUFFER_TIMESTAMP (buf);
+ rtpg729pay->first_ts = timestamp;
rtpg729pay->first_rtp_time = rtpg729pay->next_rtp_time;
}
/* let's reset the base timestamp when the adapter is empty */
if (available == 0)
- rtpg729pay->next_ts = GST_BUFFER_TIMESTAMP (buf);
+ rtpg729pay->next_ts = timestamp;
- if (available == 0 &&
- GST_BUFFER_SIZE (buf) >= min_payload_len &&
- GST_BUFFER_SIZE (buf) <= max_payload_len) {
- ret = gst_rtp_g729_pay_push (rtpg729pay,
- GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ if (available == 0 && size >= min_payload_len && size <= max_payload_len) {
+ guint8 *data;
+
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ ret = gst_rtp_g729_pay_push (rtpg729pay, data, size);
+ gst_buffer_unmap (buf, data, size);
gst_buffer_unref (buf);
return ret;
}
("Invalid input buffer size"),
("Invalid buffer size, should be a multiple of"
" G729_FRAME_SIZE(10) with an optional G729B_CN_FRAME_SIZE(2)"
- " added to it, but it is %u", available));
+ " added to it, but it is %u", size));
gst_buffer_unref (buf);
return GST_FLOW_ERROR;
}
static gboolean gst_rtp_gsm_depay_setcaps (GstBaseRTPDepayload * _depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPGSMDepay, gst_rtp_gsm_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_gsm_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPGSMDepay, gst_rtp_gsm_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_gsm_depay_base_init (gpointer klass)
+gst_rtp_gsm_depay_class_init (GstRTPGSMDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertp_depayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertp_depayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gsm_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gsm_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP GSM depayloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP GSM depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts GSM audio from RTP packets", "Zeeshan Ali <zeenix@gmail.com>");
-}
-
-static void
-gst_rtp_gsm_depay_class_init (GstRTPGSMDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertp_depayload_class;
-
- gstbasertp_depayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertp_depayload_class->process = gst_rtp_gsm_depay_process;
gstbasertp_depayload_class->set_caps = gst_rtp_gsm_depay_setcaps;
}
static void
-gst_rtp_gsm_depay_init (GstRTPGSMDepay * rtpgsmdepay,
- GstRTPGSMDepayClass * klass)
+gst_rtp_gsm_depay_init (GstRTPGSMDepay * rtpgsmdepay)
{
- /* needed because of GST_BOILERPLATE */
}
static gboolean
{
GstBuffer *outbuf = NULL;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark start of talkspurt with DISCONT */
static GstFlowReturn gst_rtp_gsm_pay_handle_buffer (GstBaseRTPPayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRTPGSMPay, gst_rtp_gsm_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
+#define gst_rtp_gsm_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPGSMPay, gst_rtp_gsm_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
-gst_rtp_gsm_pay_base_init (gpointer klass)
+gst_rtp_gsm_pay_class_init (GstRTPGSMPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpgsmpay_debug, "rtpgsmpay", 0,
+ "GSM Audio RTP Payloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gsm_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gsm_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP GSM payloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP GSM payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes GSM audio into a RTP packet",
"Zeeshan Ali <zeenix@gmail.com>");
-}
-
-static void
-gst_rtp_gsm_pay_class_init (GstRTPGSMPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_gsm_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_gsm_pay_handle_buffer;
-
- GST_DEBUG_CATEGORY_INIT (rtpgsmpay_debug, "rtpgsmpay", 0,
- "GSM Audio RTP Payloader");
}
static void
-gst_rtp_gsm_pay_init (GstRTPGSMPay * rtpgsmpay, GstRTPGSMPayClass * klass)
+gst_rtp_gsm_pay_init (GstRTPGSMPay * rtpgsmpay)
{
GST_BASE_RTP_PAYLOAD (rtpgsmpay)->clock_rate = 8000;
GST_BASE_RTP_PAYLOAD_PT (rtpgsmpay) = GST_RTP_PAYLOAD_GSM;
GstBuffer * buffer)
{
GstRTPGSMPay *rtpgsmpay;
- guint size, payload_len;
+ guint payload_len;
GstBuffer *outbuf;
guint8 *payload, *data;
GstClockTime timestamp, duration;
GstFlowReturn ret;
+ gsize size;
+ GstRTPBuffer rtp = { NULL };
rtpgsmpay = GST_RTP_GSM_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
+
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
payload_len = size;
/* FIXME, just error out for now */
- if (payload_len > GST_BASE_RTP_PAYLOAD_MTU (rtpgsmpay)) {
- GST_ELEMENT_ERROR (rtpgsmpay, STREAM, ENCODE, (NULL),
- ("payload_len %u > mtu %u", payload_len,
- GST_BASE_RTP_PAYLOAD_MTU (rtpgsmpay)));
- return GST_FLOW_ERROR;
- }
+ if (payload_len > GST_BASE_RTP_PAYLOAD_MTU (rtpgsmpay))
+ goto too_big;
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
GST_BUFFER_DURATION (outbuf) = duration;
/* get payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
-
- data = GST_BUFFER_DATA (buffer);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* copy data in payload */
- memcpy (&payload[0], data, size);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ memcpy (payload, data, size);
+
+ gst_rtp_buffer_unmap (&rtp);
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
GST_DEBUG ("gst_rtp_gsm_pay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_buffer_get_size (outbuf));
ret = gst_basertppayload_push (basepayload, outbuf);
return ret;
+
+ /* ERRORS */
+too_big:
+ {
+ GST_ELEMENT_ERROR (rtpgsmpay, STREAM, ENCODE, (NULL),
+ ("payload_len %u > mtu %u", payload_len,
+ GST_BASE_RTP_PAYLOAD_MTU (rtpgsmpay)));
+ gst_buffer_unmap (buffer, data, size);
+ return GST_FLOW_ERROR;
+ }
}
gboolean
"clock-rate = (int) 90000, " "encoding-name = (string) \"X-GST\"")
);
-GST_BOILERPLATE (GstRtpGSTDepay, gst_rtp_gst_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_gst_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpGSTDepay, gst_rtp_gst_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_gst_depay_finalize (GObject * object);
GstBuffer * buf);
static void
-gst_rtp_gst_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_gst_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_gst_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "GStreamer depayloader", "Codec/Depayloader/Network",
- "Extracts GStreamer buffers from RTP packets",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_gst_depay_class_init (GstRtpGSTDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpgstdepay_debug, "rtpgstdepay", 0,
+ "Gstreamer RTP Depayloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstelement_class->change_state = gst_rtp_gst_depay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gst_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_gst_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "GStreamer depayloader", "Codec/Depayloader/Network",
+ "Extracts GStreamer buffers from RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertpdepayload_class->set_caps = gst_rtp_gst_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_gst_depay_process;
-
- GST_DEBUG_CATEGORY_INIT (rtpgstdepay_debug, "rtpgstdepay", 0,
- "Gstreamer RTP Depayloader");
}
static void
-gst_rtp_gst_depay_init (GstRtpGSTDepay * rtpgstdepay,
- GstRtpGSTDepayClass * klass)
+gst_rtp_gst_depay_init (GstRtpGSTDepay * rtpgstdepay)
{
rtpgstdepay->adapter = gst_adapter_new ();
}
gint payload_len;
guint8 *payload;
guint CV;
+ GstRTPBuffer rtp = { NULL };
rtpgstdepay = GST_RTP_GST_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 8)
goto empty_packet;
gst_adapter_clear (rtpgstdepay->adapter);
}
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* strip off header
*
*/
/* subbuffer skipping the 8 header bytes */
- subbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 8, -1);
+ subbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 8, -1);
gst_adapter_push (rtpgstdepay->adapter, subbuf);
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
GstCaps *outcaps;
CV = (payload[0] >> 4) & 0x7;
if (payload[0] & 0x80) {
- guint b, csize, size, offset;
+ guint b, csize, left, offset;
+ gsize size;
guint8 *data;
GstBuffer *subbuf;
/* C bit, we have inline caps */
- data = GST_BUFFER_DATA (outbuf);
- size = GST_BUFFER_SIZE (outbuf);
+ data = gst_buffer_map (outbuf, &size, NULL, GST_MAP_READ);
/* start reading the length, we need this to skip to the data later */
csize = offset = 0;
+ left = size;
do {
- if (offset >= size)
+ if (offset >= left) {
+ gst_buffer_unmap (outbuf, data, size);
goto too_small;
+ }
b = data[offset++];
csize = (csize << 7) | (b & 0x7f);
} while (b & 0x80);
- if (size < csize)
+ if (left < csize) {
+ gst_buffer_unmap (outbuf, data, size);
goto too_small;
+ }
/* parse and store in cache */
outcaps = gst_caps_from_string ((gchar *) & data[offset]);
/* skip caps */
offset += csize;
- size -= csize;
+ left -= csize;
GST_DEBUG_OBJECT (rtpgstdepay,
"inline caps %u, length %u, %" GST_PTR_FORMAT, CV, csize, outcaps);
/* create real data buffer when needed */
if (size)
- subbuf = gst_buffer_create_sub (outbuf, offset, size);
+ subbuf =
+ gst_buffer_copy_region (outbuf, GST_BUFFER_COPY_ALL, offset, left);
else
subbuf = NULL;
+ gst_buffer_unmap (outbuf, data, size);
gst_buffer_unref (outbuf);
outbuf = subbuf;
}
{
GST_ELEMENT_WARNING (rtpgstdepay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
too_small:
("Buffer too small."), (NULL));
if (outbuf)
gst_buffer_unref (outbuf);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
missing_caps:
("Missing caps %u.", CV), (NULL));
if (outbuf)
gst_buffer_unref (outbuf);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstFlowReturn gst_rtp_gst_pay_handle_buffer (GstBaseRTPPayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpGSTPay, gst_rtp_gst_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
+#define gst_rtp_gst_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpGSTPay, gst_rtp_gst_pay, GST_TYPE_BASE_RTP_PAYLOAD);
- static void gst_rtp_gst_pay_base_init (gpointer klass)
+static void
+gst_rtp_gst_pay_class_init (GstRtpGSTPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gst_pay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_gst_pay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP GStreamer payloader", "Codec/Payloader/Network/RTP",
"Payload GStreamer buffers as RTP packets",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_gst_pay_class_init (GstRtpGSTPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_gst_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_gst_pay_handle_buffer;
}
static void
-gst_rtp_gst_pay_init (GstRtpGSTPay * rtpgstpay, GstRtpGSTPayClass * klass)
+gst_rtp_gst_pay_init (GstRtpGSTPay * rtpgstpay)
{
}
GstBuffer * buffer)
{
GstRtpGSTPay *rtpgstpay;
- guint8 *data;
- guint size;
+ guint8 *data, *ptr;
+ gsize size, left;
GstBuffer *outbuf;
GstFlowReturn ret;
GstClockTime timestamp;
rtpgstpay = GST_RTP_GST_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
ret = GST_FLOW_OK;
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
frag_offset = 0;
+ ptr = data;
+ left = size;
- while (size > 0) {
+ while (left > 0) {
guint towrite;
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL };
/* this will be the total lenght of the packet */
- packet_len = gst_rtp_buffer_calc_packet_len (8 + size, 0, 0);
+ packet_len = gst_rtp_buffer_calc_packet_len (8 + left, 0, 0);
/* fill one MTU or all available bytes */
towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpgstpay));
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
- payload = gst_rtp_buffer_get_payload (outbuf);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload[0] = flags;
payload[1] = payload[2] = payload[3] = 0;
payload += 8;
payload_len -= 8;
- memcpy (payload, data, payload_len);
+ memcpy (payload, ptr, payload_len);
- data += payload_len;
- size -= payload_len;
+ ptr += payload_len;
+ left -= payload_len;
frag_offset += payload_len;
- if (size == 0)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ if (left == 0)
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
ret = gst_basertppayload_push (basepayload, outbuf);
}
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
return ret;
"clock-rate = (int) 90000, " "encoding-name = (string) \"H263\"")
);
-GST_BOILERPLATE (GstRtpH263Depay, gst_rtp_h263_depay, GstBaseRTPDepayload,
+#define gst_rtp_h263_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263Depay, gst_rtp_h263_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_h263_depay_finalize (GObject * object);
GstCaps * caps);
static void
-gst_rtp_h263_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP H263 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts H263 video from RTP packets (RFC 2190)",
- "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
- "Edward Hervey <bilboed@bilboed.com>");
-}
-
-static void
gst_rtp_h263_depay_class_init (GstRtpH263DepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtph263depay_debug, "rtph263depay", 0,
+ "H263 Video RTP Depayloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gstbasertpdepayload_class->process = gst_rtp_h263_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_h263_depay_setcaps;
-
gobject_class->finalize = gst_rtp_h263_depay_finalize;
gstelement_class->change_state = gst_rtp_h263_depay_change_state;
- GST_DEBUG_CATEGORY_INIT (rtph263depay_debug, "rtph263depay", 0,
- "H263 Video RTP Depayloader");
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP H263 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H263 video from RTP packets (RFC 2190)",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
+ "Edward Hervey <bilboed@bilboed.com>");
+
+ gstbasertpdepayload_class->process = gst_rtp_h263_depay_process;
+ gstbasertpdepayload_class->set_caps = gst_rtp_h263_depay_setcaps;
}
static void
} else {
if (n > rest_bits) {
context->window =
- (context->
- window << rest_bits) | (*context->win_end & (((guint) pow (2.0,
- (double) rest_bits)) - 1));
+ (context->window << rest_bits) | (*context->
+ win_end & (((guint) pow (2.0, (double) rest_bits)) - 1));
n -= rest_bits;
rest_bits = 0;
} else {
gst_rtp_h263_pay_boundry_init (&bound, NULL, rtph263pay->data - 1, 0, 0);
context->gobs =
- (GstRtpH263PayGob **) g_malloc0 (format_props[context->
- piclayer->ptype_srcformat][0] * sizeof (GstRtpH263PayGob *));
+ (GstRtpH263PayGob **) g_malloc0 (format_props[context->piclayer->
+ ptype_srcformat][0] * sizeof (GstRtpH263PayGob *));
for (i = 0; i < format_props[context->piclayer->ptype_srcformat][0]; i++) {
/* GStreamer
- * Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
)
);
-GST_BOILERPLATE (GstRtpH263PDepay, gst_rtp_h263p_depay, GstBaseRTPDepayload,
+#define gst_rtp_h263p_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263PDepay, gst_rtp_h263p_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_h263p_depay_finalize (GObject * object);
GstCaps * caps);
static void
-gst_rtp_h263p_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263p_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_h263p_depay_sink_template));
-
-
- gst_element_class_set_details_simple (element_class, "RTP H263 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts H263/+/++ video from RTP packets (RFC 4629)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_h263p_depay_class_init (GstRtpH263PDepayClass * klass)
{
GObjectClass *gobject_class;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gstbasertpdepayload_class->process = gst_rtp_h263p_depay_process;
- gstbasertpdepayload_class->set_caps = gst_rtp_h263p_depay_setcaps;
-
gobject_class->finalize = gst_rtp_h263p_depay_finalize;
gstelement_class->change_state = gst_rtp_h263p_depay_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263p_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_h263p_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP H263 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H263/+/++ video from RTP packets (RFC 4629)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstbasertpdepayload_class->process = gst_rtp_h263p_depay_process;
+ gstbasertpdepayload_class->set_caps = gst_rtp_h263p_depay_setcaps;
}
static void
-gst_rtp_h263p_depay_init (GstRtpH263PDepay * rtph263pdepay,
- GstRtpH263PDepayClass * klass)
+gst_rtp_h263p_depay_init (GstRtpH263PDepay * rtph263pdepay)
{
rtph263pdepay->adapter = gst_adapter_new ();
}
static gboolean gst_rtp_ilbc_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPiLBCDepay, gst_rtp_ilbc_depay, GstBaseRTPDepayload,
+#define gst_rtp_ilbc_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPiLBCDepay, gst_rtp_ilbc_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
#define GST_TYPE_ILBC_MODE (gst_ilbc_mode_get_type())
}
static void
-gst_rtp_ilbc_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_ilbc_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_ilbc_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP iLBC depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts iLBC audio from RTP packets (RFC 3952)",
- "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
-}
-
-static void
gst_rtp_ilbc_depay_class_init (GstRTPiLBCDepayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gobject_class->set_property = gst_ilbc_depay_set_property;
GST_TYPE_ILBC_MODE, DEFAULT_MODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ilbc_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_ilbc_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP iLBC depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts iLBC audio from RTP packets (RFC 3952)",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
+
gstbasertpdepayload_class->process = gst_rtp_ilbc_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_ilbc_depay_setcaps;
}
static void
-gst_rtp_ilbc_depay_init (GstRTPiLBCDepay * rtpilbcdepay,
- GstRTPiLBCDepayClass * klass)
+gst_rtp_ilbc_depay_init (GstRTPiLBCDepay * rtpilbcdepay)
{
/* Set default mode */
rtpilbcdepay->mode = DEFAULT_MODE;
{
GstBuffer *outbuf;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
- marker = gst_rtp_buffer_get_marker (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_unmap (&rtp);
if (marker && outbuf) {
/* mark start of talkspurt with DISCONT */
static gboolean gst_rtp_ilbc_pay_sink_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPILBCPay, gst_rtp_ilbc_pay, GstBaseRTPAudioPayload,
+#define gst_rtp_ilbc_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPILBCPay, gst_rtp_ilbc_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_ilbc_pay_base_init (gpointer klass)
+gst_rtp_ilbc_pay_class_init (GstRTPILBCPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpilbcpay_debug, "rtpilbcpay", 0,
+ "iLBC audio RTP payloader");
- gst_element_class_add_pad_template (element_class,
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_ilbc_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_ilbc_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP iLBC Payloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP iLBC Payloader",
"Codec/Payloader/Network/RTP",
"Packetize iLBC audio streams into RTP packets",
"Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
-}
-
-static void
-gst_rtp_ilbc_pay_class_init (GstRTPILBCPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_ilbc_pay_sink_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_ilbc_pay_sink_getcaps;
-
- GST_DEBUG_CATEGORY_INIT (rtpilbcpay_debug, "rtpilbcpay", 0,
- "iLBC audio RTP payloader");
}
static void
-gst_rtp_ilbc_pay_init (GstRTPILBCPay * rtpilbcpay, GstRTPILBCPayClass * klass)
+gst_rtp_ilbc_pay_init (GstRTPILBCPay * rtpilbcpay)
{
GstBaseRTPPayload *basertppayload;
GstBaseRTPAudioPayload *basertpaudiopayload;
"clock-rate = (int) 90000")
);
-GST_BOILERPLATE (GstRtpMPADepay, gst_rtp_mpa_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_mpa_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMPADepay, gst_rtp_mpa_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_mpa_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
GstBuffer * buf);
static void
-gst_rtp_mpa_depay_base_init (gpointer klass)
+gst_rtp_mpa_depay_class_init (GstRtpMPADepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpmpadepay_debug, "rtpmpadepay", 0,
+ "MPEG Audio RTP Depayloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mpa_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mpa_depay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG audio from RTP packets (RFC 2038)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mpa_depay_class_init (GstRtpMPADepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->set_caps = gst_rtp_mpa_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_mpa_depay_process;
-
- GST_DEBUG_CATEGORY_INIT (rtpmpadepay_debug, "rtpmpadepay", 0,
- "MPEG Audio RTP Depayloader");
}
static void
-gst_rtp_mpa_depay_init (GstRtpMPADepay * rtpmpadepay,
- GstRtpMPADepayClass * klass)
+gst_rtp_mpa_depay_init (GstRtpMPADepay * rtpmpadepay)
{
- /* needed because of GST_BOILERPLATE */
}
{
GstRtpMPADepay *rtpmpadepay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
+ gint payload_len;
+#if 0
+ guint8 *payload;
+ guint16 frag_offset;
+#endif
+ gboolean marker;
rtpmpadepay = GST_RTP_MPA_DEPAY (depayload);
- {
- gint payload_len;
- gboolean marker;
-
- payload_len = gst_rtp_buffer_get_payload_len (buf);
-
- if (payload_len <= 4)
- goto empty_packet;
-
- /* strip off header
- *
- * 0 1 2 3
- * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- * | MBZ | Frag_offset |
- * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- /* frag_offset = (payload[2] << 8) | payload[3]; */
-
- /* subbuffer skipping the 4 header bytes */
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 4, -1);
- marker = gst_rtp_buffer_get_marker (buf);
-
- if (marker) {
- /* mark start of talkspurt with discont */
- GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
- }
- GST_DEBUG_OBJECT (rtpmpadepay,
- "gst_rtp_mpa_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
-
- /* FIXME, we can push half mpeg frames when they are split over multiple
- * RTP packets */
- return outbuf;
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+
+ if (payload_len <= 4)
+ goto empty_packet;
+
+#if 0
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ /* strip off header
+ *
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | MBZ | Frag_offset |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ frag_offset = (payload[2] << 8) | payload[3];
+#endif
+
+ /* subbuffer skipping the 4 header bytes */
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 4, -1);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+
+ if (marker) {
+ /* mark start of talkspurt with discont */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
+ GST_DEBUG_OBJECT (rtpmpadepay,
+ "gst_rtp_mpa_depay_chain: pushing buffer of size %d",
+ gst_buffer_get_size (outbuf));
+
+ gst_rtp_buffer_unmap (&rtp);
- return NULL;
+ /* FIXME, we can push half mpeg frames when they are split over multiple
+ * RTP packets */
+ return outbuf;
/* ERRORS */
empty_packet:
{
GST_ELEMENT_WARNING (rtpmpadepay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstFlowReturn gst_rtp_mpa_pay_handle_buffer (GstBaseRTPPayload * payload,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpMPAPay, gst_rtp_mpa_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
-
- static void gst_rtp_mpa_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpa_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpa_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG audio payloader", "Codec/Payloader/Network/RTP",
- "Payload MPEG audio as RTP packets (RFC 2038)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_mpa_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMPAPay, gst_rtp_mpa_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_mpa_pay_class_init (GstRtpMPAPayClass * klass)
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpmpapay_debug, "rtpmpapay", 0,
+ "MPEG Audio RTP Payloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstelement_class->change_state = gst_rtp_mpa_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload MPEG audio as RTP packets (RFC 2038)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_mpa_pay_setcaps;
gstbasertppayload_class->handle_event = gst_rtp_mpa_pay_handle_event;
gstbasertppayload_class->handle_buffer = gst_rtp_mpa_pay_handle_buffer;
-
- GST_DEBUG_CATEGORY_INIT (rtpmpapay_debug, "rtpmpapay", 0,
- "MPEG Audio RTP Depayloader");
}
static void
-gst_rtp_mpa_pay_init (GstRtpMPAPay * rtpmpapay, GstRtpMPAPayClass * klass)
+gst_rtp_mpa_pay_init (GstRtpMPAPay * rtpmpapay)
{
rtpmpapay->adapter = gst_adapter_new ();
}
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp = { NULL };
/* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
payload_len -= 4;
- gst_rtp_buffer_set_payload_type (outbuf, GST_RTP_PAYLOAD_MPA);
+ gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_PAYLOAD_MPA);
/*
* 0 1 2 3
* | MBZ | Frag_offset |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload[0] = 0;
payload[1] = 0;
payload[2] = frag_offset >> 8;
frag_offset += payload_len;
if (avail == 0)
- gst_rtp_buffer_set_marker (outbuf, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmpapay->first_ts;
GST_BUFFER_DURATION (outbuf) = rtpmpapay->duration;
rtpmpapay = GST_RTP_MPA_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
GstBuffer *buffer;
} GstADUFrame;
-GST_BOILERPLATE (GstRtpMPARobustDepay, gst_rtp_mpa_robust_depay,
- GstBaseRTPDepayload, GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_mpa_robust_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMPARobustDepay, gst_rtp_mpa_robust_depay,
+ GST_TYPE_BASE_RTP_DEPAYLOAD);
static GstStateChangeReturn gst_rtp_mpa_robust_change_state (GstElement *
element, GstStateChange transition);
depayload, GstBuffer * buf);
static void
-gst_rtp_mpa_robust_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpa_robust_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpa_robust_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG audio from RTP packets (RFC 5219)",
- "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
-}
-
-static void
gst_rtp_mpa_robust_depay_finalize (GObject * object)
{
GstRtpMPARobustDepay *rtpmpadepay;
G_OBJECT_CLASS (parent_class)->finalize (object);
}
-
static void
gst_rtp_mpa_robust_depay_class_init (GstRtpMPARobustDepayClass * klass)
{
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GST_DEBUG_CATEGORY_INIT (rtpmparobustdepay_debug, "rtpmparobustdepay", 0,
+ "Robust MPEG Audio RTP Depayloader");
+
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_rtp_mpa_robust_change_state);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_robust_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpa_robust_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG audio from RTP packets (RFC 5219)",
+ "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
+
gstbasertpdepayload_class->set_caps = gst_rtp_mpa_robust_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_mpa_robust_depay_process;
-
- GST_DEBUG_CATEGORY_INIT (rtpmparobustdepay_debug, "rtpmparobustdepay", 0,
- "Robust MPEG Audio RTP Depayloader");
}
static void
-gst_rtp_mpa_robust_depay_init (GstRtpMPARobustDepay * rtpmpadepay,
- GstRtpMPARobustDepayClass * klass)
+gst_rtp_mpa_robust_depay_init (GstRtpMPARobustDepay * rtpmpadepay)
{
rtpmpadepay->adapter = gst_adapter_new ();
rtpmpadepay->adu_frames = g_queue_new ();
rtpmpadepay, GstADUFrame * frame)
{
GstADUFrame *dummy;
+ guint8 *data;
+ gsize size;
dummy = g_slice_dup (GstADUFrame, frame);
dummy->backpointer = 0;
dummy->buffer = gst_buffer_new_and_alloc (dummy->side_info + 4);
- memset (GST_BUFFER_DATA (dummy->buffer), 0, dummy->side_info + 4);
- GST_WRITE_UINT32_BE (GST_BUFFER_DATA (dummy->buffer), dummy->header);
+
+ data = gst_buffer_map (dummy->buffer, &size, NULL, GST_MAP_WRITE);
+ memset (data, 0, size);
+ GST_WRITE_UINT32_BE (data, dummy->header);
+ gst_buffer_unmap (dummy->buffer, data, size);
+
GST_BUFFER_TIMESTAMP (dummy->buffer) = GST_BUFFER_TIMESTAMP (frame->buffer);
return dummy;
GstADUFrame *frame = NULL;
guint version, layer, channels, size;
guint crc;
+ guint8 *bdata;
+ gsize bsize;
g_return_val_if_fail (buf != NULL, FALSE);
- if (GST_BUFFER_SIZE (buf) < 6) {
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+
+ if (bsize < 6)
goto corrupt_frame;
- }
frame = g_slice_new0 (GstADUFrame);
- frame->header = GST_READ_UINT32_BE (GST_BUFFER_DATA (buf));
+ frame->header = GST_READ_UINT32_BE (bdata);
size = mp3_type_frame_length_from_header (GST_ELEMENT_CAST (rtpmpadepay),
frame->header, &version, &layer, &channels, NULL, NULL, NULL, &crc);
/* backpointer */
if (layer == 3) {
- frame->backpointer = GST_READ_UINT16_BE (GST_BUFFER_DATA (buf) + 4);
+ frame->backpointer = GST_READ_UINT16_BE (bdata + 4);
frame->backpointer >>= 7;
GST_LOG_OBJECT (rtpmpadepay, "backpointer: %d", frame->backpointer);
}
frame->data_size = frame->size - 4 - frame->side_info;
/* some size validation checks */
- if (4 + frame->side_info > GST_BUFFER_SIZE (buf))
+ if (4 + frame->side_info > bsize)
goto corrupt_frame;
/* ADU data would then extend past MP3 frame,
* even using past byte reservoir */
- if (-frame->backpointer + (gint) (GST_BUFFER_SIZE (buf)) > frame->size)
+ if (-frame->backpointer + (gint) (bsize) > frame->size)
goto corrupt_frame;
+ gst_buffer_unmap (buf, bdata, bsize);
+
/* ok, take buffer and queue */
frame->buffer = buf;
g_queue_push_tail (rtpmpadepay->adu_frames, frame);
corrupt_frame:
{
GST_DEBUG_OBJECT (rtpmpadepay, "frame is corrupt");
+ gst_buffer_unmap (buf, bdata, bsize);
gst_buffer_unref (buf);
if (frame)
g_slice_free (GstADUFrame, frame);
{
gboolean ret = FALSE;
guint8 *data;
+ gsize size;
guint val, iindex, icc;
- data = GST_BUFFER_DATA (buf);
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
val = GST_READ_UINT16_BE (data) >> 5;
+ gst_buffer_unmap (buf, data, size);
+
iindex = val >> 3;
icc = val & 0x7;
GstFlowReturn ret = GST_FLOW_OK;
while (1) {
+ guint8 *data;
+ gsize size;
if (G_UNLIKELY (!rtpmpadepay->cur_adu_frame)) {
rtpmpadepay->cur_adu_frame = rtpmpadepay->adu_frames->head;
continue;
}
- if (rtpmpadepay->offset == GST_BUFFER_SIZE (frame->buffer)) {
+ if (rtpmpadepay->offset == gst_buffer_get_size (frame->buffer)) {
if (g_list_next (rtpmpadepay->cur_adu_frame)) {
GST_LOG_OBJECT (rtpmpadepay,
"moving to next ADU frame, size %d, side_info %d",
gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, 0);
/* bytewriter corresponds to head frame,
* i.e. the header and the side info must match */
+ data = gst_buffer_map (head->buffer, &size, NULL, GST_MAP_READ);
gst_byte_writer_put_data (rtpmpadepay->mp3_frame,
- GST_BUFFER_DATA (head->buffer), 4 + head->side_info);
+ data, 4 + head->side_info);
+ gst_buffer_unmap (head->buffer, data, size);
}
buf = frame->buffer;
rtpmpadepay->size);
if (rtpmpadepay->offset) {
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/* no need to position, simply append */
- g_assert (GST_BUFFER_SIZE (buf) > rtpmpadepay->offset);
- av = MIN (av, GST_BUFFER_SIZE (buf) - rtpmpadepay->offset);
+ g_assert (size > rtpmpadepay->offset);
+ av = MIN (av, size - rtpmpadepay->offset);
GST_LOG_OBJECT (rtpmpadepay,
"appending %d bytes from ADU frame at offset %d", av,
rtpmpadepay->offset);
gst_byte_writer_put_data (rtpmpadepay->mp3_frame,
- GST_BUFFER_DATA (buf) + rtpmpadepay->offset, av);
+ data + rtpmpadepay->offset, av);
rtpmpadepay->offset += av;
+ gst_buffer_unmap (buf, data, size);
} else {
gint pos, tpos;
gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, pos + av);
} else {
/* position and append */
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
GST_LOG_OBJECT (rtpmpadepay, "adding to current MP3 frame");
gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, tpos);
- av = MIN (av, GST_BUFFER_SIZE (buf) - 4 - frame->side_info);
+ av = MIN (av, size - 4 - frame->side_info);
gst_byte_writer_put_data (rtpmpadepay->mp3_frame,
- GST_BUFFER_DATA (buf) + 4 + frame->side_info, av);
+ data + 4 + frame->side_info, av);
rtpmpadepay->offset += av + 4 + frame->side_info;
+ gst_buffer_unmap (buf, data, size);
}
}
gboolean cont, dtype;
guint av, size;
GstClockTime timestamp;
+ GstRTPBuffer rtp = { NULL };
rtpmpadepay = GST_RTP_MPA_ROBUST_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
timestamp = GST_BUFFER_TIMESTAMP (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 1)
goto short_read;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
offset = 0;
GST_LOG_OBJECT (rtpmpadepay, "payload_len: %d", payload_len);
GST_LOG_OBJECT (rtpmpadepay, "offset %d has cont: %d, dtype: %d, size: %d",
offset, cont, dtype, size);
- buf = gst_rtp_buffer_get_payload_subbuffer (buf, offset,
+ buf = gst_rtp_buffer_get_payload_subbuffer (&rtp, offset,
MIN (size, payload_len));
if (cont) {
"discarding continuation fragment without prior fragment");
gst_buffer_unref (buf);
} else {
- av += GST_BUFFER_SIZE (buf);
+ av += gst_buffer_get_size (buf);
gst_adapter_push (rtpmpadepay->adapter, buf);
if (av == size) {
timestamp = gst_adapter_prev_timestamp (rtpmpadepay->adapter, NULL);
/* timestamp applies to first payload, no idea for subsequent ones */
timestamp = GST_CLOCK_TIME_NONE;
}
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
{
GST_ELEMENT_WARNING (rtpmpadepay, STREAM, DECODE,
(NULL), ("Packet contains invalid data"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
"clock-rate = (int) 90000")
);
-GST_BOILERPLATE (GstRtpMPVDepay, gst_rtp_mpv_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+G_DEFINE_TYPE (GstRtpMPVDepay, gst_rtp_mpv_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_mpv_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
GstBuffer * buf);
static void
-gst_rtp_mpv_depay_base_init (gpointer klass)
+gst_rtp_mpv_depay_class_init (GstRtpMPVDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mpv_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mpv_depay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG video depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG video from RTP packets (RFC 2250)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mpv_depay_class_init (GstRtpMPVDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->set_caps = gst_rtp_mpv_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_mpv_depay_process;
}
static void
-gst_rtp_mpv_depay_init (GstRtpMPVDepay * rtpmpvdepay,
- GstRtpMPVDepayClass * klass)
+gst_rtp_mpv_depay_init (GstRtpMPVDepay * rtpmpvdepay)
{
- /* needed because of GST_BOILERPLATE */
}
static gboolean
{
GstRtpMPVDepay *rtpmpvdepay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
rtpmpvdepay = GST_RTP_MPV_DEPAY (depayload);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
{
gint payload_len, payload_header;
guint8 *payload;
guint8 T;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
payload_header = 0;
if (payload_len <= 4)
payload += 4;
}
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, payload_header, -1);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, payload_header, -1);
if (outbuf) {
GST_DEBUG_OBJECT (rtpmpvdepay,
"gst_rtp_mpv_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ gst_buffer_get_size (outbuf));
}
-
return outbuf;
}
payload, GstBuffer * buffer);
static gboolean gst_rtp_mpv_pay_handle_event (GstPad * pad, GstEvent * event);
-GST_BOILERPLATE (GstRTPMPVPay, gst_rtp_mpv_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_mpv_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpv_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mpv_pay_src_template));
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG2 ES video payloader", "Codec/Payloader/Network/RTP",
- "Payload-encodes MPEG2 ES into RTP packets (RFC 2250)",
- "Thijs Vermeir <thijsvermeir@gmail.com>");
-}
+#define gst_rtp_mpv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPMPVPay, gst_rtp_mpv_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_mpv_pay_class_init (GstRTPMPVPayClass * klass)
gstelement_class->change_state = gst_rtp_mpv_pay_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpv_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mpv_pay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG2 ES video payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes MPEG2 ES into RTP packets (RFC 2250)",
+ "Thijs Vermeir <thijsvermeir@gmail.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_mpv_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_mpv_pay_handle_buffer;
gstbasertppayload_class->handle_event = gst_rtp_mpv_pay_handle_event;
}
static void
-gst_rtp_mpv_pay_init (GstRTPMPVPay * rtpmpvpay, GstRTPMPVPayClass * klass)
+gst_rtp_mpv_pay_init (GstRTPMPVPay * rtpmpvpay)
{
GST_BASE_RTP_PAYLOAD (rtpmpvpay)->clock_rate = 90000;
GST_BASE_RTP_PAYLOAD_PT (rtpmpvpay) = GST_RTP_PAYLOAD_MPV;
guint towrite;
guint packet_len;
guint payload_len;
+ GstRTPBuffer rtp = { NULL };
packet_len = gst_rtp_buffer_calc_packet_len (avail, 4, 0);
outbuf = gst_rtp_buffer_new_allocate (payload_len, 4, 0);
- payload = gst_rtp_buffer_get_payload (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* enable MPEG Video-specific header
*
* 0 1 2 3
avail -= payload_len;
- gst_rtp_buffer_set_marker (outbuf, avail == 0);
+ gst_rtp_buffer_set_marker (&rtp, avail == 0);
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmpvpay->first_ts;
static gboolean gst_rtp_pcma_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmaDepay, gst_rtp_pcma_depay, GstBaseRTPDepayload,
+#define gst_rtp_pcma_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmaDepay, gst_rtp_pcma_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_pcma_depay_base_init (gpointer klass)
+gst_rtp_pcma_depay_class_init (GstRtpPcmaDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcma_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcma_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP PCMA depayloader",
- "Codec/Depayloader/Network/RTP",
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP PCMA depayloader", "Codec/Depayloader/Network/RTP",
"Extracts PCMA audio from RTP packets",
"Edgard Lima <edgard.lima@indt.org.br>, Zeeshan Ali <zeenix@gmail.com>");
-}
-
-static void
-gst_rtp_pcma_depay_class_init (GstRtpPcmaDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_pcma_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_pcma_depay_setcaps;
}
static void
-gst_rtp_pcma_depay_init (GstRtpPcmaDepay * rtppcmadepay,
- GstRtpPcmaDepayClass * klass)
+gst_rtp_pcma_depay_init (GstRtpPcmaDepay * rtppcmadepay)
{
GstBaseRTPDepayload *depayload;
GstBuffer *outbuf = NULL;
gboolean marker;
guint len;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- len = gst_rtp_buffer_get_payload_len (buf);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf) {
GST_BUFFER_DURATION (outbuf) =
}
}
+
return outbuf;
}
static gboolean gst_rtp_pcma_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmaPay, gst_rtp_pcma_pay, GstBaseRTPAudioPayload,
+#define gst_rtp_pcma_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmaPay, gst_rtp_pcma_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_pcma_pay_base_init (gpointer klass)
+gst_rtp_pcma_pay_class_init (GstRtpPcmaPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcma_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcma_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP PCMA payloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP PCMA payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes PCMA audio into a RTP packet",
"Edgard Lima <edgard.lima@indt.org.br>");
-}
-
-static void
-gst_rtp_pcma_pay_class_init (GstRtpPcmaPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_pcma_pay_setcaps;
}
static void
-gst_rtp_pcma_pay_init (GstRtpPcmaPay * rtppcmapay, GstRtpPcmaPayClass * klass)
+gst_rtp_pcma_pay_init (GstRtpPcmaPay * rtppcmapay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
static gboolean gst_rtp_pcmu_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmuDepay, gst_rtp_pcmu_depay, GstBaseRTPDepayload,
+#define gst_rtp_pcmu_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmuDepay, gst_rtp_pcmu_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_pcmu_depay_base_init (gpointer klass)
+gst_rtp_pcmu_depay_class_init (GstRtpPcmuDepayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcmu_depay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcmu_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP PCMU depayloader",
- "Codec/Depayloader/Network/RTP",
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP PCMU depayloader", "Codec/Depayloader/Network/RTP",
"Extracts PCMU audio from RTP packets",
"Edgard Lima <edgard.lima@indt.org.br>, Zeeshan Ali <zeenix@gmail.com>");
-}
-
-static void
-gst_rtp_pcmu_depay_class_init (GstRtpPcmuDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_pcmu_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_pcmu_depay_setcaps;
}
static void
-gst_rtp_pcmu_depay_init (GstRtpPcmuDepay * rtppcmudepay,
- GstRtpPcmuDepayClass * klass)
+gst_rtp_pcmu_depay_init (GstRtpPcmuDepay * rtppcmudepay)
{
GstBaseRTPDepayload *depayload;
GstBuffer *outbuf = NULL;
guint len;
gboolean marker;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- marker = gst_rtp_buffer_get_marker (buf);
+ marker = gst_rtp_buffer_get_marker (&rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf), marker,
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf), marker,
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
- len = gst_rtp_buffer_get_payload_len (buf);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ len = gst_rtp_buffer_get_payload_len (&rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf) {
GST_BUFFER_DURATION (outbuf) =
static gboolean gst_rtp_pcmu_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpPcmuPay, gst_rtp_pcmu_pay, GstBaseRTPAudioPayload,
+#define gst_rtp_pcmu_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmuPay, gst_rtp_pcmu_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_pcmu_pay_base_init (gpointer klass)
+gst_rtp_pcmu_pay_class_init (GstRtpPcmuPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementClass *gstelement_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcmu_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_pcmu_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP PCMU payloader",
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP PCMU payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes PCMU audio into a RTP packet",
"Edgard Lima <edgard.lima@indt.org.br>");
-}
-
-static void
-gst_rtp_pcmu_pay_class_init (GstRtpPcmuPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_pcmu_pay_setcaps;
}
static void
-gst_rtp_pcmu_pay_init (GstRtpPcmuPay * rtppcmupay, GstRtpPcmuPayClass * klass)
+gst_rtp_pcmu_pay_init (GstRtpPcmuPay * rtppcmupay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
gboolean rtp, GstBuffer * buffer, GstClockTime current_time,
GstClockTime running_time, guint64 ntpnstime)
{
+ GstMetaNetAddress *meta;
+
/* get time of arrival */
arrival->current_time = current_time;
arrival->running_time = running_time;
}
/* for netbuffer we can store the IP address to check for collisions */
- arrival->have_address = GST_IS_NETBUFFER (buffer);
- if (arrival->have_address) {
- GstNetBuffer *netbuf = (GstNetBuffer *) buffer;
-
- memcpy (&arrival->address, &netbuf->from, sizeof (GstNetAddress));
+ meta = gst_buffer_get_meta_net_address (buffer);
+ if (meta) {
+ arrival->have_address = TRUE;
+ memcpy (&arrival->address, &meta->naddr, sizeof (GstNetAddress));
+ } else {
+ arrival->have_address = FALSE;
}
}
/* GStreamer
- * Copyright (C) <2005,2006> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2005,2006> Wim Taymans <wim.taymans@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
static guint gst_rtp_dec_signals[LAST_SIGNAL] = { 0 };
-GST_BOILERPLATE (GstRTPDec, gst_rtp_dec, GstElement, GST_TYPE_ELEMENT);
+#define gst_rtp_dec_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDec, gst_rtp_dec, GST_TYPE_ELEMENT);
-static void
-gst_rtp_dec_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- /* sink pads */
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dec_recv_rtp_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dec_recv_rtcp_sink_template));
- /* src pads */
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dec_recv_rtp_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_dec_rtcp_src_template));
-
- gst_element_class_set_details_simple (element_class, "RTP Decoder",
- "Codec/Parser/Network",
- "Accepts raw RTP and RTCP packets and sends them forward",
- "Wim Taymans <wim@fluendo.com>");
-}
/* BOXED:UINT,UINT */
#define g_marshal_value_peek_uint(v) g_value_get_uint (v)
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (rtpdec_debug, "rtpdec", 0, "RTP decoder");
+
gobject_class->finalize = gst_rtp_dec_finalize;
gobject_class->set_property = gst_rtp_dec_set_property;
gobject_class->get_property = gst_rtp_dec_get_property;
GST_DEBUG_FUNCPTR (gst_rtp_dec_request_new_pad);
gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_rtp_dec_release_pad);
- GST_DEBUG_CATEGORY_INIT (rtpdec_debug, "rtpdec", 0, "RTP decoder");
+ /* sink pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_recv_rtp_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_recv_rtcp_sink_template));
+ /* src pads */
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_recv_rtp_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_dec_rtcp_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP Decoder",
+ "Codec/Parser/Network",
+ "Accepts raw RTP and RTCP packets and sends them forward",
+ "Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_rtp_dec_init (GstRTPDec * rtpdec, GstRTPDecClass * klass)
+gst_rtp_dec_init (GstRTPDec * rtpdec)
{
rtpdec->provided_clock = gst_system_clock_obtain ();
rtpdec->latency = DEFAULT_LATENCY_MS;
GstRTPDecSession *session;
guint32 ssrc;
guint8 pt;
+ GstRTPBuffer rtp = { NULL, };
rtpdec = GST_RTP_DEC (GST_PAD_PARENT (pad));
if (!gst_rtp_buffer_validate (buffer))
goto bad_packet;
- ssrc = gst_rtp_buffer_get_ssrc (buffer);
- pt = gst_rtp_buffer_get_payload_type (buffer);
+
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
GST_DEBUG_OBJECT (rtpdec, "SSRC %08x, PT %d", ssrc, pt);
} G_STMT_END
/*static guint gst_rtspsrc_signals[LAST_SIGNAL] = { 0 }; */
-
-static void
-_do_init (GType rtspsrc_type)
-{
- static const GInterfaceInfo urihandler_info = {
- gst_rtspsrc_uri_handler_init,
- NULL,
- NULL
- };
-
- GST_DEBUG_CATEGORY_INIT (rtspsrc_debug, "rtspsrc", 0, "RTSP src");
-
- g_type_add_interface_static (rtspsrc_type, GST_TYPE_URI_HANDLER,
- &urihandler_info);
-}
-
-GST_BOILERPLATE_FULL (GstRTSPSrc, gst_rtspsrc, GstBin, GST_TYPE_BIN, _do_init);
-
-static void
-gst_rtspsrc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtptemplate));
-
- gst_element_class_set_details_simple (element_class, "RTSP packet receiver",
- "Source/Network",
- "Receive data over the network via RTSP (RFC 2326)",
- "Wim Taymans <wim@fluendo.com>, "
- "Thijs Vermeir <thijs.vermeir@barco.com>, "
- "Lutz Mueller <lutz@topfrose.de>");
-}
+#define gst_rtspsrc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstRTSPSrc, gst_rtspsrc, GST_TYPE_BIN,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_rtspsrc_uri_handler_init));
static void
gst_rtspsrc_class_init (GstRTSPSrcClass * klass)
gstelement_class = (GstElementClass *) klass;
gstbin_class = (GstBinClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (rtspsrc_debug, "rtspsrc", 0, "RTSP src");
+
gobject_class->set_property = gst_rtspsrc_set_property;
gobject_class->get_property = gst_rtspsrc_get_property;
gstelement_class->send_event = gst_rtspsrc_send_event;
gstelement_class->change_state = gst_rtspsrc_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&rtptemplate));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTSP packet receiver", "Source/Network",
+ "Receive data over the network via RTSP (RFC 2326)",
+ "Wim Taymans <wim@fluendo.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>, "
+ "Lutz Mueller <lutz@topfrose.de>");
+
gstbin_class->handle_message = gst_rtspsrc_handle_message;
gst_rtsp_ext_list_init ();
static void
-gst_rtspsrc_init (GstRTSPSrc * src, GstRTSPSrcClass * g_class)
+gst_rtspsrc_init (GstRTSPSrc * src)
{
#ifdef G_OS_WIN32
WSADATA wsa_data;
/* we keep these elements, we configure all in configure_transport when the
* server told us to really use the UDP ports. */
- stream->udpsrc[0] = gst_object_ref (udpsrc0);
- stream->udpsrc[1] = gst_object_ref (udpsrc1);
+ stream->udpsrc[0] = gst_object_ref_sink (udpsrc0);
+ stream->udpsrc[1] = gst_object_ref_sink (udpsrc1);
/* keep track of next available port number when we have a range
* configured */
if (src->next_port_num != 0)
src->next_port_num = tmp_rtcp + 1;
- /* they are ours now */
- gst_object_sink (udpsrc0);
- gst_object_sink (udpsrc1);
-
return TRUE;
/* ERRORS */
GstFlowReturn res = GST_FLOW_OK;
guint8 *data;
guint size;
+ gsize bsize;
GstRTSPResult ret;
GstRTSPMessage message = { 0 };
GstRTSPConnection *conn;
stream = (GstRTSPStream *) gst_pad_get_element_private (pad);
src = stream->parent;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &bsize, NULL, GST_MAP_READ);
+ size = bsize;
gst_rtsp_message_init_data (&message, stream->channel[1]);
gst_rtsp_message_steal_body (&message, &data, &size);
gst_rtsp_message_unset (&message);
+ gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
return res;
goto no_element;
/* take ownership */
- gst_object_ref (stream->udpsrc[0]);
- gst_object_sink (stream->udpsrc[0]);
+ gst_object_ref_sink (stream->udpsrc[0]);
/* change state */
gst_element_set_state (stream->udpsrc[0], GST_STATE_PAUSED);
goto no_element;
/* take ownership */
- gst_object_ref (stream->udpsrc[1]);
- gst_object_sink (stream->udpsrc[1]);
+ gst_object_ref_sink (stream->udpsrc[1]);
gst_element_set_state (stream->udpsrc[1], GST_STATE_PAUSED);
}
size -= 1;
buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = data;
- GST_BUFFER_MALLOCDATA (buf) = data;
- GST_BUFFER_SIZE (buf) = size;
+ gst_buffer_take_memory (buf,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
/* don't need message anymore */
gst_rtsp_message_unset (&message);
udpctx->sock = -1; \
} G_STMT_END
-static void gst_dynudpsink_base_init (gpointer g_class);
-static void gst_dynudpsink_class_init (GstDynUDPSink * klass);
-static void gst_dynudpsink_init (GstDynUDPSink * udpsink);
static void gst_dynudpsink_finalize (GObject * object);
static GstFlowReturn gst_dynudpsink_render (GstBaseSink * sink,
static void gst_dynudpsink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstElementClass *parent_class = NULL;
-
static guint gst_dynudpsink_signals[LAST_SIGNAL] = { 0 };
-GType
-gst_dynudpsink_get_type (void)
-{
- static GType dynudpsink_type = 0;
-
- if (!dynudpsink_type) {
- static const GTypeInfo dynudpsink_info = {
- sizeof (GstDynUDPSinkClass),
- gst_dynudpsink_base_init,
- NULL,
- (GClassInitFunc) gst_dynudpsink_class_init,
- NULL,
- NULL,
- sizeof (GstDynUDPSink),
- 0,
- (GInstanceInitFunc) gst_dynudpsink_init,
- NULL
- };
-
- dynudpsink_type =
- g_type_register_static (GST_TYPE_BASE_SINK, "GstDynUDPSink",
- &dynudpsink_info, 0);
- }
- return dynudpsink_type;
-}
+#define gst_dynudpsink_parent_class parent_class
+G_DEFINE_TYPE (GstDynUDPSink, gst_dynudpsink, GST_TYPE_BASE_SINK);
static void
-gst_dynudpsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
-
- gst_element_class_set_details_simple (element_class, "UDP packet sender",
- "Sink/Network",
- "Send data over the network via UDP",
- "Philippe Khalaf <burger@speedy.org>");
-}
-
-static void
-gst_dynudpsink_class_init (GstDynUDPSink * klass)
+gst_dynudpsink_class_init (GstDynUDPSinkClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gstelement_class->change_state = gst_dynudpsink_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP",
+ "Philippe Khalaf <burger@speedy.org>");
+
gstbasesink_class->render = gst_dynudpsink_render;
GST_DEBUG_CATEGORY_INIT (dynudpsink_debug, "dynudpsink", 0, "UDP sink");
gst_dynudpsink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
GstDynUDPSink *sink;
- gint ret, size;
+ gint ret;
+ gsize size;
guint8 *data;
- GstNetBuffer *netbuf;
+ GstMetaNetAddress *meta;
struct sockaddr_in theiraddr;
guint16 destport;
guint32 destaddr;
memset (&theiraddr, 0, sizeof (theiraddr));
- if (GST_IS_NETBUFFER (buffer)) {
- netbuf = GST_NETBUFFER (buffer);
- } else {
+ meta = gst_buffer_get_meta_net_address (buffer);
+
+ if (meta == NULL) {
GST_DEBUG ("Received buffer is not a GstNetBuffer, skipping");
return GST_FLOW_OK;
}
sink = GST_DYNUDPSINK (bsink);
- size = GST_BUFFER_SIZE (netbuf);
- data = GST_BUFFER_DATA (netbuf);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
GST_DEBUG ("about to send %d bytes", size);
- // let's get the address from the netbuffer
- gst_netaddress_get_ip4_address (&netbuf->to, &destaddr, &destport);
+ /* let's get the address from the metadata */
+ gst_netaddress_get_ip4_address (&meta->naddr, &destaddr, &destport);
GST_DEBUG ("sending %d bytes to client %d port %d", size, destaddr, destport);
#endif
(struct sockaddr *) &theiraddr, sizeof (theiraddr));
+ gst_buffer_unmap (buffer, data, size);
+
if (ret < 0) {
if (errno != EINTR && errno != EAGAIN) {
goto send_error;
udpctx->sock = DEFAULT_SOCK; \
} G_STMT_END
-static void gst_multiudpsink_base_init (gpointer g_class);
-static void gst_multiudpsink_class_init (GstMultiUDPSinkClass * klass);
-static void gst_multiudpsink_init (GstMultiUDPSink * udpsink);
static void gst_multiudpsink_finalize (GObject * object);
static GstFlowReturn gst_multiudpsink_render (GstBaseSink * sink,
GstBuffer * buffer);
+#if 0
#ifndef G_OS_WIN32 /* sendmsg() is not available on Windows */
static GstFlowReturn gst_multiudpsink_render_list (GstBaseSink * bsink,
GstBufferList * list);
#endif
+#endif
static GstStateChangeReturn gst_multiudpsink_change_state (GstElement *
element, GstStateChange transition);
static void gst_multiudpsink_clear_internal (GstMultiUDPSink * sink,
gboolean lock);
-static GstElementClass *parent_class = NULL;
-
static guint gst_multiudpsink_signals[LAST_SIGNAL] = { 0 };
-GType
-gst_multiudpsink_get_type (void)
-{
- static GType multiudpsink_type = 0;
-
- if (!multiudpsink_type) {
- static const GTypeInfo multiudpsink_info = {
- sizeof (GstMultiUDPSinkClass),
- gst_multiudpsink_base_init,
- NULL,
- (GClassInitFunc) gst_multiudpsink_class_init,
- NULL,
- NULL,
- sizeof (GstMultiUDPSink),
- 0,
- (GInstanceInitFunc) gst_multiudpsink_init,
- NULL
- };
-
- multiudpsink_type =
- g_type_register_static (GST_TYPE_BASE_SINK, "GstMultiUDPSink",
- &multiudpsink_info, 0);
- }
- return multiudpsink_type;
-}
-
-static void
-gst_multiudpsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&sink_template));
-
- gst_element_class_set_details_simple (element_class, "UDP packet sender",
- "Sink/Network",
- "Send data over the network via UDP",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_multiudpsink_parent_class parent_class
+G_DEFINE_TYPE (GstMultiUDPSink, gst_multiudpsink, GST_TYPE_BASE_SINK);
static void
gst_multiudpsink_class_init (GstMultiUDPSinkClass * klass)
gstelement_class->change_state = gst_multiudpsink_change_state;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
gstbasesink_class->render = gst_multiudpsink_render;
+#if 0
#ifndef G_OS_WIN32
gstbasesink_class->render_list = gst_multiudpsink_render_list;
#endif
+#endif
klass->add = gst_multiudpsink_add;
klass->remove = gst_multiudpsink_remove;
klass->clear = gst_multiudpsink_clear;
#endif
}
+#ifdef G_OS_WIN32
+/* version without sendmsg */
static GstFlowReturn
gst_multiudpsink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
GstMultiUDPSink *sink;
- gint ret, size, num = 0, no_clients = 0;
+ gint ret, num = 0, no_clients = 0;
+ gsize size;
guint8 *data;
GList *clients;
gint len;
sink = GST_MULTIUDPSINK (bsink);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size > UDP_MAX_SIZE) {
GST_WARNING ("Attempting to send a UDP packet larger than maximum "
}
g_mutex_unlock (sink->client_lock);
+ gst_buffer_unmap (buffer, data, size);
+
GST_LOG_OBJECT (sink, "sent %d bytes to %d (of %d) clients", size, num,
no_clients);
return GST_FLOW_OK;
}
-
-#ifndef G_OS_WIN32
+#else /* !G_OS_WIN32 */
+/* version with sendmsg */
static GstFlowReturn
-gst_multiudpsink_render_list (GstBaseSink * bsink, GstBufferList * list)
+gst_multiudpsink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
GstMultiUDPSink *sink;
GList *clients;
gint ret, size = 0, num = 0, no_clients = 0;
struct iovec *iov;
struct msghdr msg = { 0 };
-
- GstBufferListIterator *it;
- guint gsize;
- GstBuffer *buf;
+ guint n_mem, i;
+ gpointer bdata;
+ gsize bsize;
+ GstMemory *mem;
sink = GST_MULTIUDPSINK (bsink);
- g_return_val_if_fail (list != NULL, GST_FLOW_ERROR);
+ msg.msg_iovlen = 0;
+ size = 0;
+
+ n_mem = gst_buffer_n_memory (buffer);
+ if (n_mem == 0)
+ goto no_data;
- it = gst_buffer_list_iterate (list);
- g_return_val_if_fail (it != NULL, GST_FLOW_ERROR);
+ iov = (struct iovec *) g_malloc (n_mem * sizeof (struct iovec));
+ msg.msg_iov = iov;
- while (gst_buffer_list_iterator_next_group (it)) {
- msg.msg_iovlen = 0;
- size = 0;
+ for (i = 0; i < n_mem; i++) {
+ mem = gst_buffer_peek_memory (buffer, i, GST_MAP_READ);
+ bdata = gst_memory_map (mem, &bsize, NULL, GST_MAP_READ);
- if ((gsize = gst_buffer_list_iterator_n_buffers (it)) == 0) {
- goto invalid_list;
+ if (bsize > UDP_MAX_SIZE) {
+ GST_WARNING ("Attempting to send a UDP packet larger than maximum "
+ "size (%d > %d)", bsize, UDP_MAX_SIZE);
}
- iov = (struct iovec *) g_malloc (gsize * sizeof (struct iovec));
- msg.msg_iov = iov;
+ msg.msg_iov[msg.msg_iovlen].iov_len = bsize;
+ msg.msg_iov[msg.msg_iovlen].iov_base = bdata;
+ msg.msg_iovlen++;
- while ((buf = gst_buffer_list_iterator_next (it))) {
- if (GST_BUFFER_SIZE (buf) > UDP_MAX_SIZE) {
- GST_WARNING ("Attempting to send a UDP packet larger than maximum "
- "size (%d > %d)", GST_BUFFER_SIZE (buf), UDP_MAX_SIZE);
- }
+ size += bsize;
+ }
- msg.msg_iov[msg.msg_iovlen].iov_len = GST_BUFFER_SIZE (buf);
- msg.msg_iov[msg.msg_iovlen].iov_base = GST_BUFFER_DATA (buf);
- msg.msg_iovlen++;
- size += GST_BUFFER_SIZE (buf);
- }
+ sink->bytes_to_serve += size;
- sink->bytes_to_serve += size;
+ /* grab lock while iterating and sending to clients, this should be
+ * fast as UDP never blocks */
+ g_mutex_lock (sink->client_lock);
+ GST_LOG_OBJECT (bsink, "about to send %d bytes", size);
- /* grab lock while iterating and sending to clients, this should be
- * fast as UDP never blocks */
- g_mutex_lock (sink->client_lock);
- GST_LOG_OBJECT (bsink, "about to send %d bytes", size);
-
- for (clients = sink->clients; clients; clients = g_list_next (clients)) {
- GstUDPClient *client;
- gint count;
-
- client = (GstUDPClient *) clients->data;
- no_clients++;
- GST_LOG_OBJECT (sink, "sending %d bytes to client %p", size, client);
-
- count = sink->send_duplicates ? client->refcount : 1;
-
- while (count--) {
- while (TRUE) {
- msg.msg_name = (void *) &client->theiraddr;
- msg.msg_namelen = sizeof (client->theiraddr);
- ret = sendmsg (*client->sock, &msg, 0);
-
- if (ret < 0) {
- if (!socket_error_is_ignorable ()) {
- break;
- }
- } else {
- num++;
- client->bytes_sent += ret;
- client->packets_sent++;
- sink->bytes_served += ret;
+ for (clients = sink->clients; clients; clients = g_list_next (clients)) {
+ GstUDPClient *client;
+ gint count;
+
+ client = (GstUDPClient *) clients->data;
+ no_clients++;
+ GST_LOG_OBJECT (sink, "sending %d bytes to client %p", size, client);
+
+ count = sink->send_duplicates ? client->refcount : 1;
+
+ while (count--) {
+ while (TRUE) {
+ msg.msg_name = (void *) &client->theiraddr;
+ msg.msg_namelen = sizeof (client->theiraddr);
+ ret = sendmsg (*client->sock, &msg, 0);
+
+ if (ret < 0) {
+ if (!socket_error_is_ignorable ()) {
+ gchar *errormessage = socket_last_error_message ();
+ GST_WARNING_OBJECT (sink, "client %p gave error %d (%s)", client,
+ socket_last_error_code (), errormessage);
+ g_free (errormessage);
+ break;
break;
}
+ } else {
+ num++;
+ client->bytes_sent += ret;
+ client->packets_sent++;
+ sink->bytes_served += ret;
+ break;
}
}
}
- g_mutex_unlock (sink->client_lock);
+ }
+ g_mutex_unlock (sink->client_lock);
- g_free (iov);
- msg.msg_iov = NULL;
+ /* unmap all memory again */
+ for (i = 0; i < n_mem; i++) {
+ mem = gst_buffer_peek_memory (buffer, i, GST_MAP_READ);
- GST_LOG_OBJECT (sink, "sent %d bytes to %d (of %d) clients", size, num,
- no_clients);
+ bsize = msg.msg_iov[i].iov_len;
+ bdata = msg.msg_iov[i].iov_base;
+
+ gst_memory_unmap (mem, bdata, bsize);
}
+ g_free (iov);
- gst_buffer_list_iterator_free (it);
+ GST_LOG_OBJECT (sink, "sent %d bytes to %d (of %d) clients", size, num,
+ no_clients);
return GST_FLOW_OK;
-invalid_list:
- gst_buffer_list_iterator_free (it);
- return GST_FLOW_ERROR;
+no_data:
+ {
+ return GST_FLOW_OK;
+ }
+}
+#endif
+
+#if 0
+/* DISABLED, core sends buffers to our render one by one, we can't really do
+ * much better */
+static GstFlowReturn
+gst_multiudpsink_render_list (GstBaseSink * bsink, GstBufferList * list)
+{
}
#endif
return FALSE;
#endif
- /* register type of the netbuffer so that we can use it from multiple threads
- * right away. Note that the plugin loading is always serialized */
- gst_netbuffer_get_type ();
+ /* register info of the netaddress metadata so that we can use it from
+ * multiple threads right away. Note that the plugin loading is always
+ * serialized */
+ gst_meta_net_address_get_info ();
if (!gst_element_register (plugin, "udpsink", GST_RANK_NONE,
GST_TYPE_UDPSINK))
/* FILL ME */
};
-static void gst_udpsink_base_init (gpointer g_class);
-static void gst_udpsink_class_init (GstUDPSink * klass);
-static void gst_udpsink_init (GstUDPSink * udpsink);
static void gst_udpsink_finalize (GstUDPSink * udpsink);
static void gst_udpsink_uri_handler_init (gpointer g_iface,
static void gst_udpsink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstElementClass *parent_class = NULL;
-
/*static guint gst_udpsink_signals[LAST_SIGNAL] = { 0 }; */
-
-GType
-gst_udpsink_get_type (void)
-{
- static GType udpsink_type = 0;
-
- if (!udpsink_type) {
- static const GTypeInfo udpsink_info = {
- sizeof (GstUDPSinkClass),
- gst_udpsink_base_init,
- NULL,
- (GClassInitFunc) gst_udpsink_class_init,
- NULL,
- NULL,
- sizeof (GstUDPSink),
- 0,
- (GInstanceInitFunc) gst_udpsink_init,
- NULL
- };
- static const GInterfaceInfo urihandler_info = {
- gst_udpsink_uri_handler_init,
- NULL,
- NULL
- };
-
- udpsink_type =
- g_type_register_static (GST_TYPE_MULTIUDPSINK, "GstUDPSink",
- &udpsink_info, 0);
-
- g_type_add_interface_static (udpsink_type, GST_TYPE_URI_HANDLER,
- &urihandler_info);
-
- }
- return udpsink_type;
-}
-
-static void
-gst_udpsink_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "UDP packet sender",
- "Sink/Network",
- "Send data over the network via UDP", "Wim Taymans <wim@fluendo.com>");
-}
+#define gst_udpsink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstUDPSink, gst_udpsink, GST_TYPE_MULTIUDPSINK,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_udpsink_uri_handler_init));
static void
-gst_udpsink_class_init (GstUDPSink * klass)
+gst_udpsink_class_init (GstUDPSinkClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
-
- parent_class = g_type_class_peek_parent (klass);
+ gstelement_class = (GstElementClass *) klass;
gobject_class->set_property = gst_udpsink_set_property;
gobject_class->get_property = gst_udpsink_get_property;
g_param_spec_int ("port", "port", "The port to send the packets to",
0, 65535, UDP_DEFAULT_PORT,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-}
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP", "Wim Taymans <wim@fluendo.com>");
+}
static void
gst_udpsink_init (GstUDPSink * udpsink)
static void gst_udpsrc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static void
-_do_init (GType type)
-{
- static const GInterfaceInfo urihandler_info = {
- gst_udpsrc_uri_handler_init,
- NULL,
- NULL
- };
-
- g_type_add_interface_static (type, GST_TYPE_URI_HANDLER, &urihandler_info);
-
- GST_DEBUG_CATEGORY_INIT (udpsrc_debug, "udpsrc", 0, "UDP src");
-}
-
-GST_BOILERPLATE_FULL (GstUDPSrc, gst_udpsrc, GstPushSrc, GST_TYPE_PUSH_SRC,
- _do_init);
-
-static void
-gst_udpsrc_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&src_template));
-
- gst_element_class_set_details_simple (element_class, "UDP packet receiver",
- "Source/Network",
- "Receive data over the network via UDP",
- "Wim Taymans <wim@fluendo.com>, "
- "Thijs Vermeir <thijs.vermeir@barco.com>");
-}
+#define gst_udpsrc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstUDPSrc, gst_udpsrc, GST_TYPE_PUSH_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_udpsrc_uri_handler_init));
static void
gst_udpsrc_class_init (GstUDPSrcClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseSrcClass *gstbasesrc_class;
GstPushSrcClass *gstpushsrc_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasesrc_class = (GstBaseSrcClass *) klass;
gstpushsrc_class = (GstPushSrcClass *) klass;
+ GST_DEBUG_CATEGORY_INIT (udpsrc_debug, "udpsrc", 0, "UDP src");
+
gobject_class->set_property = gst_udpsrc_set_property;
gobject_class->get_property = gst_udpsrc_get_property;
gobject_class->finalize = gst_udpsrc_finalize;
g_param_spec_boolean ("reuse", "Reuse", "Enable reuse of the port",
UDP_DEFAULT_REUSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&src_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "UDP packet receiver",
+ "Source/Network",
+ "Receive data over the network via UDP",
+ "Wim Taymans <wim@fluendo.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>");
+
gstbasesrc_class->start = gst_udpsrc_start;
gstbasesrc_class->stop = gst_udpsrc_stop;
gstbasesrc_class->unlock = gst_udpsrc_unlock;
}
static void
-gst_udpsrc_init (GstUDPSrc * udpsrc, GstUDPSrcClass * g_class)
+gst_udpsrc_init (GstUDPSrc * udpsrc)
{
WSA_STARTUP (udpsrc);
gst_udpsrc_create (GstPushSrc * psrc, GstBuffer ** buf)
{
GstUDPSrc *udpsrc;
- GstNetBuffer *outbuf;
+ GstMetaNetAddress *meta;
+ GstBuffer *outbuf;
union gst_sockaddr
{
struct sockaddr sa;
socklen_t slen;
guint8 *pktdata;
gint pktsize;
+ gsize offset;
#ifdef G_OS_UNIX
gint readsize;
#elif defined G_OS_WIN32
pktdata = g_malloc (readsize);
pktsize = readsize;
+ offset = 0;
while (TRUE) {
slen = sizeof (sa);
break;
}
- /* special case buffer so receivers can also track the address */
- outbuf = gst_netbuffer_new ();
- GST_BUFFER_MALLOCDATA (outbuf) = pktdata;
-
/* patch pktdata and len when stripping off the headers */
if (G_UNLIKELY (udpsrc->skip_first_bytes != 0)) {
if (G_UNLIKELY (readsize <= udpsrc->skip_first_bytes))
goto skip_error;
- pktdata += udpsrc->skip_first_bytes;
+ offset += udpsrc->skip_first_bytes;
ret -= udpsrc->skip_first_bytes;
}
- GST_BUFFER_DATA (outbuf) = pktdata;
- GST_BUFFER_SIZE (outbuf) = ret;
+
+ outbuf = gst_buffer_new ();
+ gst_buffer_take_memory (outbuf,
+ gst_memory_new_wrapped (0, pktdata, g_free, pktsize, offset, ret));
+
+ /* use buffer metadata so receivers can also track the address */
+ meta = gst_buffer_add_meta_net_address (outbuf);
switch (sa.sa.sa_family) {
case AF_INET:
{
- gst_netaddress_set_ip4_address (&outbuf->from, sa.sa_in.sin_addr.s_addr,
+ gst_netaddress_set_ip4_address (&meta->naddr, sa.sa_in.sin_addr.s_addr,
sa.sa_in.sin_port);
}
break;
guint8 ip6[16];
memcpy (ip6, &sa.sa_in6.sin6_addr, sizeof (ip6));
- gst_netaddress_set_ip6_address (&outbuf->from, ip6, sa.sa_in6.sin6_port);
+ gst_netaddress_set_ip6_address (&meta->naddr, ip6, sa.sa_in6.sin6_port);
}
break;
default:
static void gst_gamma_calculate_tables (GstGamma * gamma);
-GST_BOILERPLATE (GstGamma, gst_gamma, GstVideoFilter, GST_TYPE_VIDEO_FILTER);
-
-static void
-gst_gamma_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video gamma correction",
- "Filter/Effect/Video",
- "Adjusts gamma on a video stream",
- "Arwed v. Merkatz <v.merkatz@gmx.net>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_gamma_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_gamma_src_template));
-}
+G_DEFINE_TYPE (GstGamma, gst_gamma, GST_TYPE_VIDEO_FILTER);
static void
gst_gamma_class_init (GstGammaClass * g_class)
{
GObjectClass *gobject_class = (GObjectClass *) g_class;
+ GstElementClass *gstelement_class = (GstElementClass *) g_class;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) g_class;
GST_DEBUG_CATEGORY_INIT (gamma_debug, "gamma", 0, "gamma");
0.01, 10, DEFAULT_PROP_GAMMA,
GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS | G_PARAM_READWRITE));
+ gst_element_class_set_details_simple (gstelement_class,
+ "Video gamma correction", "Filter/Effect/Video",
+      "Adjusts gamma on a video stream", "Arwed v. Merkatz <v.merkatz@gmx.net>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_gamma_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_gamma_src_template));
+
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_gamma_set_caps);
trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_gamma_transform_ip);
trans_class->before_transform =
}
static void
-gst_gamma_init (GstGamma * gamma, GstGammaClass * g_class)
+gst_gamma_init (GstGamma * gamma)
{
/* properties */
gamma->gamma = DEFAULT_PROP_GAMMA;
{
GstGamma *gamma = GST_GAMMA (base);
guint8 *data;
- guint size;
+ gsize size;
if (!gamma->process)
goto not_negotiated;
if (base->passthrough)
goto done;
- data = GST_BUFFER_DATA (outbuf);
- size = GST_BUFFER_SIZE (outbuf);
+ data = gst_buffer_map (outbuf, &size, NULL, GST_MAP_READWRITE);
if (size != gamma->size)
goto wrong_size;
gamma->process (gamma, data);
GST_OBJECT_UNLOCK (gamma);
+ gst_buffer_unmap (outbuf, data, size);
+
done:
return GST_FLOW_OK;
{
GST_ELEMENT_ERROR (gamma, STREAM, FORMAT,
(NULL), ("Invalid buffer size %d, expected %d", size, gamma->size));
+ gst_buffer_unmap (outbuf, data, size);
return GST_FLOW_ERROR;
}
not_negotiated:
static void gst_video_balance_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static void
-_do_init (GType video_balance_type)
-{
- static const GInterfaceInfo iface_info = {
- (GInterfaceInitFunc) gst_video_balance_interface_init,
- NULL,
- NULL,
- };
- static const GInterfaceInfo colorbalance_info = {
- (GInterfaceInitFunc) gst_video_balance_colorbalance_init,
- NULL,
- NULL,
- };
-
- g_type_add_interface_static (video_balance_type,
- GST_TYPE_IMPLEMENTS_INTERFACE, &iface_info);
- g_type_add_interface_static (video_balance_type, GST_TYPE_COLOR_BALANCE,
- &colorbalance_info);
-}
-
-GST_BOILERPLATE_FULL (GstVideoBalance, gst_video_balance, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER, _do_init);
+#define gst_video_balance_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstVideoBalance, gst_video_balance,
+ GST_TYPE_VIDEO_FILTER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_IMPLEMENTS_INTERFACE,
+ gst_video_balance_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
+ gst_video_balance_colorbalance_init));
/*
* look-up tables (LUT).
{
GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);
guint8 *data;
- guint size;
+ gsize size;
if (!videobalance->process)
goto not_negotiated;
if (base->passthrough)
goto done;
- data = GST_BUFFER_DATA (outbuf);
- size = GST_BUFFER_SIZE (outbuf);
+ data = gst_buffer_map (outbuf, &size, NULL, GST_MAP_READWRITE);
if (size != videobalance->size)
goto wrong_size;
videobalance->process (videobalance, data);
GST_OBJECT_UNLOCK (videobalance);
+ gst_buffer_unmap (outbuf, data, size);
+
done:
return GST_FLOW_OK;
GST_ELEMENT_ERROR (videobalance, STREAM, FORMAT,
(NULL), ("Invalid buffer size %d, expected %d", size,
videobalance->size));
+ gst_buffer_unmap (outbuf, data, size);
return GST_FLOW_ERROR;
}
not_negotiated:
}
static void
-gst_video_balance_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video balance",
- "Filter/Effect/Video",
- "Adjusts brightness, contrast, hue, saturation on a video stream",
- "David Schleef <ds@schleef.org>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_video_balance_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_video_balance_src_template));
-}
-
-static void
gst_video_balance_finalize (GObject * object)
{
GList *channels = NULL;
gst_video_balance_class_init (GstVideoBalanceClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0,
DEFAULT_PROP_SATURATION,
GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Video balance",
+ "Filter/Effect/Video",
+ "Adjusts brightness, contrast, hue, saturation on a video stream",
+ "David Schleef <ds@schleef.org>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_balance_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_balance_src_template));
+
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_balance_set_caps);
trans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_video_balance_transform_ip);
}
static void
-gst_video_balance_init (GstVideoBalance * videobalance,
- GstVideoBalanceClass * klass)
+gst_video_balance_init (GstVideoBalance * videobalance)
{
const gchar *channels[4] = { "HUE", "SATURATION",
"BRIGHTNESS", "CONTRAST"
return video_flip_method_type;
}
-GST_BOILERPLATE (GstVideoFlip, gst_video_flip, GstVideoFilter,
- GST_TYPE_VIDEO_FILTER);
+#define gst_video_flip_parent_class parent_class
+G_DEFINE_TYPE (GstVideoFlip, gst_video_flip, GST_TYPE_VIDEO_FILTER);
static GstCaps *
gst_video_flip_transform_caps (GstBaseTransform * trans,
static gboolean
gst_video_flip_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
- guint * size)
+ gsize * size)
{
GstVideoFormat format;
gint width, height;
{
GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans);
guint8 *dest;
- const guint8 *src;
+ guint8 *src;
+ gsize srcsize, destsize;
if (G_UNLIKELY (videoflip->process == NULL))
goto not_negotiated;
- src = GST_BUFFER_DATA (in);
- dest = GST_BUFFER_DATA (out);
+ src = gst_buffer_map (in, &srcsize, NULL, GST_MAP_READ);
+ dest = gst_buffer_map (out, &destsize, NULL, GST_MAP_WRITE);
GST_LOG_OBJECT (videoflip, "videoflip: flipping %dx%d to %dx%d (%s)",
videoflip->from_width, videoflip->from_height, videoflip->to_width,
videoflip->process (videoflip, dest, src);
GST_OBJECT_UNLOCK (videoflip);
+ gst_buffer_unmap (in, src, srcsize);
+ gst_buffer_unmap (out, dest, destsize);
+
return GST_FLOW_OK;
not_negotiated:
}
static void
-gst_video_flip_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video flipper",
- "Filter/Effect/Video",
- "Flips and rotates video", "David Schleef <ds@schleef.org>");
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_video_flip_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_video_flip_src_template));
-}
-
-static void
gst_video_flip_class_init (GstVideoFlipClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GST_DEBUG_CATEGORY_INIT (video_flip_debug, "videoflip", 0, "videoflip");
GST_TYPE_VIDEO_FLIP_METHOD, PROP_METHOD_DEFAULT,
GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ gst_element_class_set_details_simple (gstelement_class, "Video flipper",
+ "Filter/Effect/Video",
+ "Flips and rotates video", "David Schleef <ds@schleef.org>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_flip_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_video_flip_src_template));
+
trans_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_video_flip_transform_caps);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_flip_set_caps);
}
static void
-gst_video_flip_init (GstVideoFlip * videoflip, GstVideoFlipClass * klass)
+gst_video_flip_init (GstVideoFlip * videoflip)
{
videoflip->method = PROP_METHOD_DEFAULT;
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (videoflip), TRUE);
/* Do something only on a change and if not locked */
if (!GST_COLLECT_PADS2_STATE_IS_SET (data, GST_COLLECT_PADS2_STATE_LOCKED) &&
(GST_COLLECT_PADS2_STATE_IS_SET (data, GST_COLLECT_PADS2_STATE_WAITING) !=
- ! !waiting)) {
+ !!waiting)) {
/* Set waiting state for this pad */
if (waiting)
GST_COLLECT_PADS2_STATE_SET (data, GST_COLLECT_PADS2_STATE_WAITING);
}
ret = gst_oss4_mixer_set_control_val (s->mixer, s->mc, volume);
} else {
- ret = gst_oss4_mixer_set_control_val (s->mixer, s->mc->mute, ! !mute);
+ ret = gst_oss4_mixer_set_control_val (s->mixer, s->mc->mute, !!mute);
}
if (mute) {
if (s->mc->mute != NULL && s->mc->mute->changed) {
gst_mixer_mute_toggled (GST_MIXER (s->mixer), track,
- ! !s->mc->mute->last_val);
+ !!s->mc->mute->last_val);
} else {
/* nothing to do here, since we don't/can't easily implement the record
* flag */
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
-
/*
* GstV4l2Buffer:
*/
-
-static GstBufferClass *v4l2buffer_parent_class = NULL;
+const GstMetaInfo *
+gst_meta_v4l2_get_info (void)
+{
+ static const GstMetaInfo *meta_info = NULL;
+
+ if (meta_info == NULL) {
+ meta_info =
+ gst_meta_register ("GstMetaV4l2", "GstMetaV4l2",
+ sizeof (GstMetaV4l2), (GstMetaInitFunction) NULL,
+ (GstMetaFreeFunction) NULL, (GstMetaTransformFunction) NULL,
+ (GstMetaSerializeFunction) NULL, (GstMetaDeserializeFunction) NULL);
+ }
+ return meta_info;
+}
static void
-gst_v4l2_buffer_finalize (GstV4l2Buffer * buffer)
+gst_v4l2_buffer_dispose (GstBuffer * buffer)
{
GstV4l2BufferPool *pool;
gboolean resuscitated = FALSE;
gint index;
+ GstMetaV4l2 *meta;
- pool = buffer->pool;
+ meta = GST_META_V4L2_GET (buffer);
+ g_assert (meta != NULL);
- index = buffer->vbuffer.index;
+ pool = meta->pool;
+ index = meta->vbuffer.index;
GST_LOG_OBJECT (pool->v4l2elem, "finalizing buffer %p %d", buffer, index);
if (resuscitated) {
/* FIXME: check that the caps didn't change */
GST_LOG_OBJECT (pool->v4l2elem, "reviving buffer %p, %d", buffer, index);
- gst_buffer_ref (GST_BUFFER (buffer));
+ gst_buffer_ref (buffer);
GST_BUFFER_SIZE (buffer) = 0;
pool->buffers[index] = buffer;
}
if (!resuscitated) {
GST_LOG_OBJECT (pool->v4l2elem,
"buffer %p (data %p, len %u) not recovered, unmapping",
- buffer, GST_BUFFER_DATA (buffer), buffer->vbuffer.length);
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
- v4l2_munmap ((void *) GST_BUFFER_DATA (buffer), buffer->vbuffer.length);
+ buffer, GST_BUFFER_DATA (buffer), meta->vbuffer.length);
+ v4l2_munmap ((void *) GST_BUFFER_DATA (buffer), meta->vbuffer.length);
- GST_MINI_OBJECT_CLASS (v4l2buffer_parent_class)->finalize (GST_MINI_OBJECT
- (buffer));
+ g_object_unref (pool);
}
}
-static void
-gst_v4l2_buffer_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
- v4l2buffer_parent_class = g_type_class_peek_parent (g_class);
-
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_v4l2_buffer_finalize;
-}
-
-GType
-gst_v4l2_buffer_get_type (void)
-{
- static GType _gst_v4l2_buffer_type;
-
- if (G_UNLIKELY (_gst_v4l2_buffer_type == 0)) {
- static const GTypeInfo v4l2_buffer_info = {
- sizeof (GstBufferClass),
- NULL,
- NULL,
- gst_v4l2_buffer_class_init,
- NULL,
- NULL,
- sizeof (GstV4l2Buffer),
- 0,
- NULL,
- NULL
- };
- _gst_v4l2_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
- "GstV4l2Buffer", &v4l2_buffer_info, 0);
- }
- return _gst_v4l2_buffer_type;
-}
-
-static GstV4l2Buffer *
+static GstBuffer *
gst_v4l2_buffer_new (GstV4l2BufferPool * pool, guint index, GstCaps * caps)
{
- GstV4l2Buffer *ret;
- guint8 *data;
+ GstBuffer *ret;
+ guint8 *mem;
+ GstMetaV4l2 *meta;
+
+ ret = gst_buffer_new ();
+ GST_MINI_OBJECT_CAST (ret)->dispose =
+ (GstMiniObjectDisposeFunction) gst_v4l2_buffer_dispose;
- ret = (GstV4l2Buffer *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER);
+ meta = GST_META_V4L2_ADD (ret);
GST_LOG_OBJECT (pool->v4l2elem, "creating buffer %u, %p in pool %p", index,
ret, pool);
- ret->pool =
- (GstV4l2BufferPool *) gst_mini_object_ref (GST_MINI_OBJECT (pool));
+ meta->pool = (GstV4l2BufferPool *) g_object_ref (pool);
- ret->vbuffer.index = index;
- ret->vbuffer.type = pool->type;
- ret->vbuffer.memory = V4L2_MEMORY_MMAP;
+ meta->vbuffer.index = index;
+ meta->vbuffer.type = pool->type;
+ meta->vbuffer.memory = V4L2_MEMORY_MMAP;
- if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &ret->vbuffer) < 0)
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
goto querybuf_failed;
- GST_LOG_OBJECT (pool->v4l2elem, " index: %u", ret->vbuffer.index);
- GST_LOG_OBJECT (pool->v4l2elem, " type: %d", ret->vbuffer.type);
- GST_LOG_OBJECT (pool->v4l2elem, " bytesused: %u", ret->vbuffer.bytesused);
- GST_LOG_OBJECT (pool->v4l2elem, " flags: %08x", ret->vbuffer.flags);
- GST_LOG_OBJECT (pool->v4l2elem, " field: %d", ret->vbuffer.field);
- GST_LOG_OBJECT (pool->v4l2elem, " memory: %d", ret->vbuffer.memory);
- if (ret->vbuffer.memory == V4L2_MEMORY_MMAP)
+ GST_LOG_OBJECT (pool->v4l2elem, " index: %u", meta->vbuffer.index);
+ GST_LOG_OBJECT (pool->v4l2elem, " type: %d", meta->vbuffer.type);
+ GST_LOG_OBJECT (pool->v4l2elem, " bytesused: %u", meta->vbuffer.bytesused);
+ GST_LOG_OBJECT (pool->v4l2elem, " flags: %08x", meta->vbuffer.flags);
+ GST_LOG_OBJECT (pool->v4l2elem, " field: %d", meta->vbuffer.field);
+ GST_LOG_OBJECT (pool->v4l2elem, " memory: %d", meta->vbuffer.memory);
+ if (meta->vbuffer.memory == V4L2_MEMORY_MMAP)
GST_LOG_OBJECT (pool->v4l2elem, " MMAP offset: %u",
- ret->vbuffer.m.offset);
- GST_LOG_OBJECT (pool->v4l2elem, " length: %u", ret->vbuffer.length);
- GST_LOG_OBJECT (pool->v4l2elem, " input: %u", ret->vbuffer.input);
+ meta->vbuffer.m.offset);
+ GST_LOG_OBJECT (pool->v4l2elem, " length: %u", meta->vbuffer.length);
+ GST_LOG_OBJECT (pool->v4l2elem, " input: %u", meta->vbuffer.input);
- data = (guint8 *) v4l2_mmap (0, ret->vbuffer.length,
+ mem = (guint8 *) v4l2_mmap (0, meta->vbuffer.length,
PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
- ret->vbuffer.m.offset);
+ meta->vbuffer.m.offset);
- if (data == MAP_FAILED)
+ if (mem == MAP_FAILED)
goto mmap_failed;
- GST_BUFFER_DATA (ret) = data;
- GST_BUFFER_SIZE (ret) = ret->vbuffer.length;
+ GST_BUFFER_DATA (ret) = mem;
+ GST_BUFFER_SIZE (ret) = meta->vbuffer.length;
GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_READONLY);
- gst_buffer_set_caps (GST_BUFFER (ret), caps);
+ gst_buffer_set_caps (ret, caps);
return ret;
gint errnosave = errno;
GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave));
- gst_buffer_unref (GST_BUFFER (ret));
+ gst_buffer_unref (ret);
errno = errnosave;
return NULL;
}
gint errnosave = errno;
GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave));
- gst_buffer_unref (GST_BUFFER (ret));
+ gst_buffer_unref (ret);
errno = errnosave;
return NULL;
}
* GstV4l2BufferPool:
*/
-static GstMiniObjectClass *buffer_pool_parent_class = NULL;
+static GObjectClass *buffer_pool_parent_class = NULL;
static void
-gst_v4l2_buffer_pool_finalize (GstV4l2BufferPool * pool)
+gst_v4l2_buffer_pool_finalize (GObject * object)
{
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (object);
+
g_mutex_free (pool->lock);
pool->lock = NULL;
pool->buffers = NULL;
}
- GST_MINI_OBJECT_CLASS (buffer_pool_parent_class)->finalize (GST_MINI_OBJECT
- (pool));
+ buffer_pool_parent_class->finalize (object);
}
static void
static void
gst_v4l2_buffer_pool_class_init (gpointer g_class, gpointer class_data)
{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
+ GObjectClass *object_class = G_OBJECT_CLASS (g_class);
buffer_pool_parent_class = g_type_class_peek_parent (g_class);
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_v4l2_buffer_pool_finalize;
+ object_class->finalize = gst_v4l2_buffer_pool_finalize;
}
GType
if (G_UNLIKELY (_gst_v4l2_buffer_pool_type == 0)) {
static const GTypeInfo v4l2_buffer_pool_info = {
- sizeof (GstMiniObjectClass),
+ sizeof (GObjectClass),
NULL,
NULL,
gst_v4l2_buffer_pool_class_init,
(GInstanceInitFunc) gst_v4l2_buffer_pool_init,
NULL
};
- _gst_v4l2_buffer_pool_type = g_type_register_static (GST_TYPE_MINI_OBJECT,
+ _gst_v4l2_buffer_pool_type = g_type_register_static (G_TYPE_OBJECT,
"GstV4l2BufferPool", &v4l2_buffer_pool_info, 0);
}
return _gst_v4l2_buffer_pool_type;
gint n;
struct v4l2_requestbuffers breq;
- pool = (GstV4l2BufferPool *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER_POOL);
+ pool = (GstV4l2BufferPool *) g_object_new (GST_TYPE_V4L2_BUFFER_POOL, NULL);
pool->video_fd = v4l2_dup (fd);
if (pool->video_fd < 0)
pool->requeuebuf = requeuebuf;
pool->type = type;
pool->buffer_count = num_buffers;
- pool->buffers = g_new0 (GstV4l2Buffer *, num_buffers);
+ pool->buffers = g_new0 (GstBuffer *, num_buffers);
pool->avail_buffers = g_async_queue_new ();
/* now, map the buffers: */
{
gint errnosave = errno;
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
+ g_object_unref (pool);
errno = errnosave;
gst_buffer_unref (buf);
}
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
+ g_object_unref (pool);
}
/**
*
* Get an available buffer in the pool
*/
-GstV4l2Buffer *
+GstBuffer *
gst_v4l2_buffer_pool_get (GstV4l2BufferPool * pool, gboolean blocking)
{
- GstV4l2Buffer *buf;
+ GstBuffer *buf;
if (blocking) {
buf = g_async_queue_pop (pool->avail_buffers);
}
if (buf) {
+ GstMetaV4l2 *meta = GST_META_V4L2_GET (buf);
+
GST_V4L2_BUFFER_POOL_LOCK (pool);
- GST_BUFFER_SIZE (buf) = buf->vbuffer.length;
+ GST_BUFFER_SIZE (buf) = meta->vbuffer.length;
GST_BUFFER_FLAG_UNSET (buf, 0xffffffff);
GST_V4L2_BUFFER_POOL_UNLOCK (pool);
}
* Returns: %TRUE for success
*/
gboolean
-gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstV4l2Buffer * buf)
+gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstBuffer * buf)
{
- GST_LOG_OBJECT (pool->v4l2elem, "enqueue pool buffer %d", buf->vbuffer.index);
+ GstMetaV4l2 *meta;
- if (v4l2_ioctl (pool->video_fd, VIDIOC_QBUF, &buf->vbuffer) < 0)
+ meta = GST_META_V4L2_GET (buf);
+
+ GST_LOG_OBJECT (pool->v4l2elem, "enqueue pool buffer %d",
+ meta->vbuffer.index);
+
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_QBUF, &meta->vbuffer) < 0)
return FALSE;
pool->num_live_buffers--;
*
* Returns: a buffer
*/
-GstV4l2Buffer *
+GstBuffer *
gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool)
{
GstV4l2Object *v4l2object = get_v4l2_object (pool->v4l2elem);
- GstV4l2Buffer *pool_buffer;
+ GstBuffer *pool_buffer;
struct v4l2_buffer buffer;
memset (&buffer, 0x00, sizeof (buffer));
buffer.type = pool->type;
buffer.memory = V4L2_MEMORY_MMAP;
-
if (v4l2_ioctl (pool->video_fd, VIDIOC_DQBUF, &buffer) >= 0) {
GST_V4L2_BUFFER_POOL_LOCK (pool);
typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
-typedef struct _GstV4l2Buffer GstV4l2Buffer;
+typedef struct _GstMetaV4l2 GstMetaV4l2;
struct _GstV4l2BufferPool
{
- GstMiniObject parent;
+ GObject parent;
GstElement *v4l2elem; /* the v4l2 src/sink that owns us.. maybe we should be owned by v4l2object? */
gboolean requeuebuf; /* if true, unusued buffers are automatically re-QBUF'd */
GAsyncQueue* avail_buffers;/* pool of available buffers, not with the driver and which aren't held outside the bufferpool */
gint video_fd; /* a dup(2) of the v4l2object's video_fd */
guint buffer_count;
- GstV4l2Buffer **buffers;
+ GstBuffer **buffers;
};
-struct _GstV4l2Buffer {
- GstBuffer buffer;
+struct _GstMetaV4l2 {
+ GstMeta meta;
struct v4l2_buffer vbuffer;
GstV4l2BufferPool *pool;
};
+const GstMetaInfo * gst_meta_v4l2_get_info (void);
+#define GST_META_V4L2_GET(buf) ((GstMetaV4l2 *)gst_buffer_get_meta(buf,gst_meta_v4l2_get_info()))
+#define GST_META_V4L2_ADD(buf) ((GstMetaV4l2 *)gst_buffer_add_meta(buf,gst_meta_v4l2_get_info(),NULL))
+
void gst_v4l2_buffer_pool_destroy (GstV4l2BufferPool * pool);
GstV4l2BufferPool *gst_v4l2_buffer_pool_new (GstElement *v4l2elem, gint fd, gint num_buffers, GstCaps * caps, gboolean requeuebuf, enum v4l2_buf_type type);
-GstV4l2Buffer *gst_v4l2_buffer_pool_get (GstV4l2BufferPool *pool, gboolean blocking);
-gboolean gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool *pool, GstV4l2Buffer *buf);
-GstV4l2Buffer *gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool *pool);
+GstBuffer *gst_v4l2_buffer_pool_get (GstV4l2BufferPool *pool, gboolean blocking);
+gboolean gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool *pool, GstBuffer *buf);
+GstBuffer *gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool *pool);
gint gst_v4l2_buffer_pool_available_buffers (GstV4l2BufferPool *pool);
V4L2_STD_OBJECT_PROPS,
};
+G_LOCK_DEFINE_STATIC (probe_lock);
+
const GList *
gst_v4l2_probe_get_properties (GstPropertyProbe * probe)
{
GObjectClass *klass = G_OBJECT_GET_CLASS (probe);
static GList *list = NULL;
- /* well, not perfect, but better than no locking at all.
- * In the worst case we leak a list node, so who cares? */
- GST_CLASS_LOCK (GST_OBJECT_CLASS (klass));
+ G_LOCK (probe_lock);
if (!list) {
list = g_list_append (NULL, g_object_class_find_property (klass, "device"));
}
- GST_CLASS_UNLOCK (GST_OBJECT_CLASS (klass));
+ G_UNLOCK (probe_lock);
return list;
}
g_assert (iface_type == GST_TYPE_X_OVERLAY ||
iface_type == GST_TYPE_NAVIGATION ||
iface_type == GST_TYPE_COLOR_BALANCE ||
- iface_type == GST_TYPE_VIDEO_ORIENTATION ||
- iface_type == GST_TYPE_TUNER);
+ iface_type == GST_TYPE_VIDEO_ORIENTATION || iface_type == GST_TYPE_TUNER);
#else
g_assert (iface_type == GST_TYPE_COLOR_BALANCE ||
- iface_type == GST_TYPE_VIDEO_ORIENTATION ||
- iface_type == GST_TYPE_TUNER);
+ iface_type == GST_TYPE_VIDEO_ORIENTATION || iface_type == GST_TYPE_TUNER);
#endif
if (v4l2object->video_fd == -1)
gst_v4l2src_buffer_pool_activate (GstV4l2BufferPool * pool,
GstV4l2Src * v4l2src)
{
- GstV4l2Buffer *buf;
+ GstBuffer *buf;
while ((buf = gst_v4l2_buffer_pool_get (pool, FALSE)) != NULL)
if (!gst_v4l2_buffer_pool_qbuf (pool, buf))
(_("Could not enqueue buffers in device '%s'."),
v4l2src->v4l2object->videodev),
("enqueing buffer %d/%d failed: %s",
- buf->vbuffer.index, v4l2src->num_buffers, g_strerror (errno)));
+ GST_META_V4L2_GET (buf)->vbuffer.index, v4l2src->num_buffers,
+ g_strerror (errno)));
return FALSE;
}
}
/* Called when a buffer is returned from the pipeline */
static void
-gst_ximage_src_return_buf (GstXImageSrc * ximagesrc,
- GstXImageSrcBuffer * ximage)
+gst_ximage_src_return_buf (GstXImageSrc * ximagesrc, GstBuffer * ximage)
{
+ GstMetaXImage *meta = GST_META_XIMAGE_GET (ximage);
+
/* If our geometry changed we can't reuse that image. */
- if ((ximage->width != ximagesrc->width) ||
- (ximage->height != ximagesrc->height)) {
+ if ((meta->width != ximagesrc->width) || (meta->height != ximagesrc->height)) {
GST_DEBUG_OBJECT (ximagesrc,
"destroy image %p as its size changed %dx%d vs current %dx%d",
- ximage, ximage->width, ximage->height,
- ximagesrc->width, ximagesrc->height);
+ ximage, meta->width, meta->height, ximagesrc->width, ximagesrc->height);
g_mutex_lock (ximagesrc->x_lock);
gst_ximageutil_ximage_destroy (ximagesrc->xcontext, ximage);
g_mutex_unlock (ximagesrc->x_lock);
/* In that case we can reuse the image and add it to our image pool. */
GST_LOG_OBJECT (ximagesrc, "recycling image %p in pool", ximage);
/* need to increment the refcount again to recycle */
- gst_buffer_ref (GST_BUFFER (ximage));
+ gst_buffer_ref (ximage);
g_mutex_lock (ximagesrc->pool_lock);
ximagesrc->buffer_pool = g_slist_prepend (ximagesrc->buffer_pool, ximage);
g_mutex_unlock (ximagesrc->pool_lock);
/* Retrieve an XImageSrcBuffer, preferably from our
* pool of existing images and populate it from the window */
-static GstXImageSrcBuffer *
+static GstBuffer *
gst_ximage_src_ximage_get (GstXImageSrc * ximagesrc)
{
- GstXImageSrcBuffer *ximage = NULL;
+ GstBuffer *ximage = NULL;
+ GstMetaXImage *meta;
g_mutex_lock (ximagesrc->pool_lock);
while (ximagesrc->buffer_pool != NULL) {
ximage = ximagesrc->buffer_pool->data;
- if ((ximage->width != ximagesrc->width) ||
- (ximage->height != ximagesrc->height)) {
+ meta = GST_META_XIMAGE_GET (ximage);
+
+ if ((meta->width != ximagesrc->width) ||
+ (meta->height != ximagesrc->height)) {
gst_ximage_buffer_free (ximage);
}
gst_value_get_fraction_numerator (xcontext->par),
gst_value_get_fraction_denominator (xcontext->par), NULL);
- gst_buffer_set_caps (GST_BUFFER (ximage), caps);
+ gst_buffer_set_caps (ximage, caps);
g_mutex_unlock (ximagesrc->x_lock);
gst_caps_unref (caps);
}
g_return_val_if_fail (GST_IS_XIMAGE_SRC (ximagesrc), NULL);
+
+ meta = GST_META_XIMAGE_GET (ximage);
+
#ifdef HAVE_XDAMAGE
if (ximagesrc->have_xdamage && ximagesrc->use_damage &&
ximagesrc->last_ximage != NULL) {
startx, starty, width, height);
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
startx, starty, width, height, AllPlanes, ZPixmap,
- ximage->ximage, startx - ximagesrc->startx,
+ meta->ximage, startx - ximagesrc->startx,
starty - ximagesrc->starty);
}
} else {
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
rects[i].x, rects[i].y,
rects[i].width, rects[i].height,
- AllPlanes, ZPixmap, ximage->ximage, rects[i].x, rects[i].y);
+ AllPlanes, ZPixmap, meta->ximage, rects[i].x, rects[i].y);
}
}
free (rects);
GST_DEBUG_OBJECT (ximagesrc, "Removing cursor from %d,%d", x, y);
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
startx, starty, iwidth, iheight, AllPlanes, ZPixmap,
- ximage->ximage, startx - ximagesrc->startx,
+ meta->ximage, startx - ximagesrc->startx,
starty - ximagesrc->starty);
}
} else {
GST_DEBUG_OBJECT (ximagesrc, "Removing cursor from %d,%d", x, y);
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
- x, y, width, height, AllPlanes, ZPixmap, ximage->ximage, x, y);
+ x, y, width, height, AllPlanes, ZPixmap, meta->ximage, x, y);
}
}
#endif
if (ximagesrc->xcontext->use_xshm) {
GST_DEBUG_OBJECT (ximagesrc, "Retrieving screen using XShm");
XShmGetImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
- ximage->ximage, ximagesrc->startx, ximagesrc->starty, AllPlanes);
+ meta->ximage, ximagesrc->startx, ximagesrc->starty, AllPlanes);
} else
#endif /* HAVE_XSHM */
if (ximagesrc->remote) {
XGetSubImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
ximagesrc->startx, ximagesrc->starty, ximagesrc->width,
- ximagesrc->height, AllPlanes, ZPixmap, ximage->ximage, 0, 0);
+ ximagesrc->height, AllPlanes, ZPixmap, meta->ximage, 0, 0);
} else {
- ximage->ximage =
+ meta->ximage =
XGetImage (ximagesrc->xcontext->disp, ximagesrc->xwindow,
ximagesrc->startx, ximagesrc->starty, ximagesrc->width,
ximagesrc->height, AllPlanes, ZPixmap);
(guint8 *) & (ximagesrc->cursor_image->pixels[((j -
cy) * ximagesrc->cursor_image->width + (i - cx))]);
dest =
- (guint8 *) & (ximage->ximage->data[((j -
+ (guint8 *) & (meta->ximage->data[((j -
ximagesrc->starty) * ximagesrc->width + (i -
ximagesrc->startx)) * (ximagesrc->xcontext->bpp /
8)]);
gst_ximage_src_create (GstPushSrc * bs, GstBuffer ** buf)
{
GstXImageSrc *s = GST_XIMAGE_SRC (bs);
- GstXImageSrcBuffer *image;
+ GstBuffer *image;
GstClockTime base_time;
GstClockTime next_capture_ts;
GstClockTime dur;
{
g_mutex_lock (ximagesrc->pool_lock);
while (ximagesrc->buffer_pool != NULL) {
- GstXImageSrcBuffer *ximage = ximagesrc->buffer_pool->data;
+ GstBuffer *ximage = ximagesrc->buffer_pool->data;
gst_ximage_buffer_free (ximage);
int damage_event_base;
XserverRegion damage_region;
GC damage_copy_gc;
- GstXImageSrcBuffer *last_ximage;
+ GstBuffer *last_ximage;
#endif
};
#include "ximageutil.h"
+const GstMetaInfo *
+gst_meta_ximage_get_info (void)
+{
+ static const GstMetaInfo *meta_ximage_info = NULL;
+
+ if (meta_ximage_info == NULL) {
+ meta_ximage_info =
+ gst_meta_register ("GstMetaXImageSrc", "GstMetaXImageSrc",
+ sizeof (GstMetaXImage), (GstMetaInitFunction) NULL,
+ (GstMetaFreeFunction) NULL, (GstMetaTransformFunction) NULL,
+ (GstMetaSerializeFunction) NULL, (GstMetaDeserializeFunction) NULL);
+ }
+ return meta_ximage_info;
+}
+
#ifdef HAVE_XSHM
static gboolean error_caught = FALSE;
gst_value_get_fraction_denominator (xcontext->par));
}
-static GstBufferClass *ximagesrc_buffer_parent_class = NULL;
-
static void
-gst_ximagesrc_buffer_finalize (GstXImageSrcBuffer * ximage)
+gst_ximagesrc_buffer_dispose (GstBuffer * ximage)
{
GstElement *parent;
+ GstMetaXImage *meta;
g_return_if_fail (ximage != NULL);
- parent = ximage->parent;
+ meta = GST_META_XIMAGE_GET (ximage);
+
+ parent = meta->parent;
if (parent == NULL) {
g_warning ("XImageSrcBuffer->ximagesrc == NULL");
goto beach;
}
- if (ximage->return_func)
- ximage->return_func (parent, ximage);
+ if (meta->return_func)
+ meta->return_func (parent, ximage);
beach:
-
- GST_MINI_OBJECT_CLASS (ximagesrc_buffer_parent_class)->finalize
- (GST_MINI_OBJECT (ximage));
-
return;
}
void
-gst_ximage_buffer_free (GstXImageSrcBuffer * ximage)
+gst_ximage_buffer_free (GstBuffer * ximage)
{
- /* make sure it is not recycled */
- ximage->width = -1;
- ximage->height = -1;
- gst_buffer_unref (GST_BUFFER (ximage));
-}
+ GstMetaXImage *meta;
-static void
-gst_ximagesrc_buffer_init (GstXImageSrcBuffer * ximage_buffer, gpointer g_class)
-{
-#ifdef HAVE_XSHM
- ximage_buffer->SHMInfo.shmaddr = ((void *) -1);
- ximage_buffer->SHMInfo.shmid = -1;
-#endif
-}
-
-static void
-gst_ximagesrc_buffer_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
- ximagesrc_buffer_parent_class = g_type_class_peek_parent (g_class);
-
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_ximagesrc_buffer_finalize;
-}
+ meta = GST_META_XIMAGE_GET (ximage);
-static GType
-gst_ximagesrc_buffer_get_type (void)
-{
- static GType _gst_ximagesrc_buffer_type;
-
- if (G_UNLIKELY (_gst_ximagesrc_buffer_type == 0)) {
- static const GTypeInfo ximagesrc_buffer_info = {
- sizeof (GstBufferClass),
- NULL,
- NULL,
- gst_ximagesrc_buffer_class_init,
- NULL,
- NULL,
- sizeof (GstXImageSrcBuffer),
- 0,
- (GInstanceInitFunc) gst_ximagesrc_buffer_init,
- NULL
- };
- _gst_ximagesrc_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
- "GstXImageSrcBuffer", &ximagesrc_buffer_info, 0);
- }
- return _gst_ximagesrc_buffer_type;
+ /* make sure it is not recycled */
+ meta->width = -1;
+ meta->height = -1;
+ gst_buffer_unref (ximage);
}
/* This function handles GstXImageSrcBuffer creation depending on XShm availability */
-GstXImageSrcBuffer *
+GstBuffer *
gst_ximageutil_ximage_new (GstXContext * xcontext,
GstElement * parent, int width, int height, BufferReturnFunc return_func)
{
- GstXImageSrcBuffer *ximage = NULL;
+ GstBuffer *ximage = NULL;
+ GstMetaXImage *meta;
gboolean succeeded = FALSE;
- ximage =
- (GstXImageSrcBuffer *) gst_mini_object_new (GST_TYPE_XIMAGESRC_BUFFER);
+ ximage = gst_buffer_new ();
+ GST_MINI_OBJECT_CAST (ximage)->dispose =
+ (GstMiniObjectDisposeFunction) gst_ximagesrc_buffer_dispose;
- ximage->width = width;
- ximage->height = height;
+ meta = GST_META_XIMAGE_ADD (ximage);
+ meta->width = width;
+ meta->height = height;
#ifdef HAVE_XSHM
+ meta->SHMInfo.shmaddr = ((void *) -1);
+ meta->SHMInfo.shmid = -1;
+
if (xcontext->use_xshm) {
- ximage->ximage = XShmCreateImage (xcontext->disp,
+ meta->ximage = XShmCreateImage (xcontext->disp,
xcontext->visual, xcontext->depth,
- ZPixmap, NULL, &ximage->SHMInfo, ximage->width, ximage->height);
- if (!ximage->ximage) {
+ ZPixmap, NULL, &meta->SHMInfo, meta->width, meta->height);
+ if (!meta->ximage) {
goto beach;
}
/* we have to use the returned bytes_per_line for our shm size */
- ximage->size = ximage->ximage->bytes_per_line * ximage->ximage->height;
- ximage->SHMInfo.shmid = shmget (IPC_PRIVATE, ximage->size,
- IPC_CREAT | 0777);
- if (ximage->SHMInfo.shmid == -1)
+ meta->size = meta->ximage->bytes_per_line * meta->ximage->height;
+ meta->SHMInfo.shmid = shmget (IPC_PRIVATE, meta->size, IPC_CREAT | 0777);
+ if (meta->SHMInfo.shmid == -1)
goto beach;
- ximage->SHMInfo.shmaddr = shmat (ximage->SHMInfo.shmid, 0, 0);
- if (ximage->SHMInfo.shmaddr == ((void *) -1))
+ meta->SHMInfo.shmaddr = shmat (meta->SHMInfo.shmid, 0, 0);
+ if (meta->SHMInfo.shmaddr == ((void *) -1))
goto beach;
/* Delete the SHM segment. It will actually go away automatically
* when we detach now */
- shmctl (ximage->SHMInfo.shmid, IPC_RMID, 0);
+ shmctl (meta->SHMInfo.shmid, IPC_RMID, 0);
- ximage->ximage->data = ximage->SHMInfo.shmaddr;
- ximage->SHMInfo.readOnly = FALSE;
+ meta->ximage->data = meta->SHMInfo.shmaddr;
+ meta->SHMInfo.readOnly = FALSE;
- if (XShmAttach (xcontext->disp, &ximage->SHMInfo) == 0)
+ if (XShmAttach (xcontext->disp, &meta->SHMInfo) == 0)
goto beach;
XSync (xcontext->disp, FALSE);
} else
#endif /* HAVE_XSHM */
{
- ximage->ximage = XCreateImage (xcontext->disp,
+ meta->ximage = XCreateImage (xcontext->disp,
xcontext->visual,
xcontext->depth,
- ZPixmap, 0, NULL, ximage->width, ximage->height, xcontext->bpp, 0);
- if (!ximage->ximage)
+ ZPixmap, 0, NULL, meta->width, meta->height, xcontext->bpp, 0);
+ if (!meta->ximage)
goto beach;
/* we have to use the returned bytes_per_line for our image size */
- ximage->size = ximage->ximage->bytes_per_line * ximage->ximage->height;
- ximage->ximage->data = g_malloc (ximage->size);
+ meta->size = meta->ximage->bytes_per_line * meta->ximage->height;
+ meta->ximage->data = g_malloc (meta->size);
XSync (xcontext->disp, FALSE);
}
succeeded = TRUE;
- GST_BUFFER_DATA (ximage) = (guchar *) ximage->ximage->data;
- GST_BUFFER_SIZE (ximage) = ximage->size;
+ GST_BUFFER_DATA (ximage) = (guchar *) meta->ximage->data;
+ GST_BUFFER_SIZE (ximage) = meta->size;
/* Keep a ref to our src */
- ximage->parent = gst_object_ref (parent);
- ximage->return_func = return_func;
+ meta->parent = gst_object_ref (parent);
+ meta->return_func = return_func;
beach:
if (!succeeded) {
gst_ximage_buffer_free (ximage);
/* This function destroys a GstXImageBuffer handling XShm availability */
void
-gst_ximageutil_ximage_destroy (GstXContext * xcontext,
- GstXImageSrcBuffer * ximage)
+gst_ximageutil_ximage_destroy (GstXContext * xcontext, GstBuffer * ximage)
{
+ GstMetaXImage *meta;
+
+ meta = GST_META_XIMAGE_GET (ximage);
+
/* We might have some buffers destroyed after changing state to NULL */
if (!xcontext)
goto beach;
#ifdef HAVE_XSHM
if (xcontext->use_xshm) {
- if (ximage->SHMInfo.shmaddr != ((void *) -1)) {
- XShmDetach (xcontext->disp, &ximage->SHMInfo);
+ if (meta->SHMInfo.shmaddr != ((void *) -1)) {
+ XShmDetach (xcontext->disp, &meta->SHMInfo);
XSync (xcontext->disp, 0);
- shmdt (ximage->SHMInfo.shmaddr);
+ shmdt (meta->SHMInfo.shmaddr);
}
- if (ximage->ximage)
- XDestroyImage (ximage->ximage);
+ if (meta->ximage)
+ XDestroyImage (meta->ximage);
} else
#endif /* HAVE_XSHM */
{
- if (ximage->ximage) {
- XDestroyImage (ximage->ximage);
+ if (meta->ximage) {
+ XDestroyImage (meta->ximage);
}
}
XSync (xcontext->disp, FALSE);
beach:
- if (ximage->parent) {
+ if (meta->parent) {
/* Release the ref to our parent */
- gst_object_unref (ximage->parent);
- ximage->parent = NULL;
+ gst_object_unref (meta->parent);
+ meta->parent = NULL;
}
return;
typedef struct _GstXContext GstXContext;
typedef struct _GstXWindow GstXWindow;
typedef struct _GstXImage GstXImage;
-typedef struct _GstXImageSrcBuffer GstXImageSrcBuffer;
+typedef struct _GstMetaXImage GstMetaXImage;
/* Global X Context stuff */
/**
/* custom ximagesrc buffer, copied from ximagesink */
/* BufferReturnFunc is called when a buffer is finalised */
-typedef void (*BufferReturnFunc) (GstElement *parent, GstXImageSrcBuffer *buf);
+typedef void (*BufferReturnFunc) (GstElement *parent, GstBuffer *buf);
/**
- * GstXImageSrcBuffer:
+ * GstMetaXImage:
* @parent: a reference to the element we belong to
* @ximage: the XImage of this buffer
* @width: the width in pixels of XImage @ximage
* @height: the height in pixels of XImage @ximage
* @size: the size in bytes of XImage @ximage
*
- * Subclass of #GstBuffer containing additional information about an XImage.
+ * Extra data attached to buffers containing additional information about an XImage.
*/
-struct _GstXImageSrcBuffer {
- GstBuffer buffer;
+struct _GstMetaXImage {
+ GstMeta meta;
/* Reference to the ximagesrc we belong to */
GstElement *parent;
gint width, height;
size_t size;
-
+
BufferReturnFunc return_func;
};
+const GstMetaInfo * gst_meta_ximage_get_info (void);
+#define GST_META_XIMAGE_GET(buf) ((GstMetaXImage *)gst_buffer_get_meta(buf,gst_meta_ximage_get_info()))
+#define GST_META_XIMAGE_ADD(buf) ((GstMetaXImage *)gst_buffer_add_meta(buf,gst_meta_ximage_get_info(),NULL))
-GstXImageSrcBuffer *gst_ximageutil_ximage_new (GstXContext *xcontext,
+GstBuffer *gst_ximageutil_ximage_new (GstXContext *xcontext,
GstElement *parent, int width, int height, BufferReturnFunc return_func);
void gst_ximageutil_ximage_destroy (GstXContext *xcontext,
- GstXImageSrcBuffer * ximage);
+ GstBuffer * ximage);
/* Call to manually release a buffer */
-void gst_ximage_buffer_free (GstXImageSrcBuffer *ximage);
-
-#define GST_TYPE_XIMAGESRC_BUFFER (gst_ximagesrc_buffer_get_type())
-#define GST_IS_XIMAGESRC_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_XIMAGESRC_BUFFER))
-#define GST_IS_XIMAGESRC_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_XIMAGESRC_BUFFER))
-#define GST_XIMAGESRC_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_XIMAGESRC_BUFFER, GstXImageSrcBuffer))
-#define GST_XIMAGESRC_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_XIMAGESRC_BUFFER, GstXImageSrcBufferClass))
-#define GST_XIMAGESRC_BUFFER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_XIMAGESRC_BUFFER, GstXImageSrcBufferClass))
+void gst_ximage_buffer_free (GstBuffer *ximage);
G_END_DECLS
noinst_PROGRAMS = pulse
pulse_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
-pulse_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-0.10 $(GST_BASE_LIBS) $(GST_LIBS)
+pulse_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(GST_LIBS)
probe_SOURCES = probe.c
probe_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
-probe_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-0.10 $(GST_BASE_LIBS) $(GST_LIBS)
+probe_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(GST_LIBS)
test_oss4_SOURCES = test-oss4.c
test_oss4_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
-test_oss4_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-0.10 $(GST_LIBS)
+test_oss4_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_LIBS)
test_oss4_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
else
OSS4_TESTS=