--- /dev/null
+#FIXME clean me up a bit
+
+filterdir = $(libdir)/gst
+filter_LTLIBRARIES = libgstmpegaudioparse.la libgstmp3types.la
+
+libgstmpegaudioparse_la_SOURCES = gstmpegaudioparse.c gstmp3types.c
+# FIXME is this useful?
+libgstmpegaudioparse_la_CFLAGS = -O3 $(FOMIT_FRAME_POINTER) -ffast-math -finline-functions $(GST_CFLAGS)
+
+libgstmp3types_la_SOURCES = gstmp3types.c
+libgstmp3types_la_CFLAGS = -O3 $(FOMIT_FRAME_POINTER) -ffast-math -finline-functions $(GST_CFLAGS)
+
+noinst_HEADERS = gstmpegaudioparse.h
+EXTRA_DIST = README
+
+# FIXME is this needed?
--- /dev/null
+MP3 Audio Parser
+================
+
+This element acts as a parser for mpeg audio data. It's called 'mp3' but
+in reality will work for any MPEG-1, MPEG-2, or MPEG-2.5 elemental audio
+stream of any of Layers I, II, and III. It will not (currently, ever?)
+handle MPEG-2 BC or NBC streams, as those have rather specialized needs
+best served by a different filter.
+
+It will take an mpeg audio stream in any form on its 'src' input, with any
+buffer size, and split it into buffers containing a single frame each.
+NOTE: ancillary data is not dealt with right now.
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+//#define DEBUG_ENABLED
+#include <gst/gst.h>
+
+static GstCaps* mp3_typefind(GstBuffer *buf, gpointer private);
+
+static GstTypeDefinition mp3type_definitions[] = {
+ { "mp3types_audio/mp3", "audio/mp3", ".mp3 .mp2 .mp1 .mpga", mp3_typefind },
+ { NULL, NULL, NULL, NULL },
+};
+
+static GstCaps*
+mp3_typefind(GstBuffer *buf, gpointer private)
+{
+  /* Read the first 32 bits of the buffer as a big-endian candidate frame
+   * header.  NOTE(review): assumes the buffer holds at least 4 bytes and
+   * that gulong is 32 bits wide -- on LP64 platforms this cast reads 8
+   * bytes; confirm target platforms. */
+  gulong head = GULONG_FROM_BE(*((gulong *)GST_BUFFER_DATA(buf)));
+  GstCaps *caps;
+
+  GST_DEBUG (0,"mp3typefind: typefind\n");
+  /* sync marker: the top 11 bits must all be set */
+  if ((head & 0xffe00000) != 0xffe00000)
+    return NULL;
+  /* layer bits: 0 is reserved */
+  if (!((head >> 17) & 3))
+    return NULL;
+  /* bitrate index: 0xf is invalid; 0 ("free format") is rejected too */
+  if (((head >> 12) & 0xf) == 0xf)
+    return NULL;
+  if (!((head >> 12) & 0xf))
+    return NULL;
+  /* samplerate index: 3 is reserved */
+  if (((head >> 10) & 0x3) == 0x3)
+    return NULL;
+
+  /* plausible header: report plain "audio/mp3" caps */
+  caps = gst_caps_new ("mp3_typefind", "audio/mp3", NULL);
+// gst_caps_set(caps,"layer",GST_PROPS_INT(4-((head>>17)&0x3)));
+
+  return caps;
+}
+
+static gboolean
+plugin_init (GModule *module, GstPlugin *plugin)
+{
+  /* Register one typefind factory per entry in the NULL-terminated
+   * mp3type_definitions table. */
+  gint i=0;
+
+  while (mp3type_definitions[i].name) {
+    GstTypeFactory *type;
+
+    type = gst_typefactory_new (&mp3type_definitions[i]);
+    gst_plugin_add_feature (plugin, GST_PLUGIN_FEATURE (type));
+    i++;
+  }
+
+// gst_info("gsttypes: loaded %d mp3 types\n",i);
+
+  return TRUE;
+}
+
+GstPluginDesc plugin_desc = {
+ GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "mp3types",
+ plugin_init
+};
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+//#define GST_DEBUG_ENABLED
+#include <gstmpegaudioparse.h>
+
+
+/* elementfactory information */
+static GstElementDetails mp3parse_details = {
+ "MP3 Parser",
+ "Filter/Parser/Audio",
+ "Parses and frames MP3 audio streams, provides seek",
+ VERSION,
+ "Erik Walthinsen <omega@cse.ogi.edu>",
+ "(C) 1999",
+};
+
+/* Build the always-available source pad template: framed "audio/mp3"
+ * advertising layers 1-3 and bitrates 8-320 kbit/s. */
+static GstPadTemplate*
+mp3_src_factory (void)
+{
+  return
+    gst_padtemplate_new (
+  	"src",
+  	GST_PAD_SRC,
+  	GST_PAD_ALWAYS,
+  	gst_caps_new (
+  	  "mp3parse_src",
+  	  "audio/mp3",
+	  gst_props_new (
+	    "layer",   GST_PROPS_INT_RANGE (1, 3),
+	    "bitrate", GST_PROPS_INT_RANGE (8, 320),
+	    "framed",  GST_PROPS_BOOLEAN (TRUE),
+	    NULL)),
+	NULL);
+}
+
+/* Build the always-available sink pad template: unframed "audio/mp3".
+ * (The stray semicolon that followed the closing brace -- invalid at
+ * file scope in ISO C90 -- has been removed.) */
+static GstPadTemplate*
+mp3_sink_factory (void)
+{
+  return
+    gst_padtemplate_new (
+  	"sink",
+  	GST_PAD_SINK,
+  	GST_PAD_ALWAYS,
+  	gst_caps_new (
+  	  "mp3parse_sink",
+  	  "audio/mp3",
+	  NULL),
+	NULL);
+}
+
+/* GstMPEGAudioParse signals and args */
+enum {
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum {
+ ARG_0,
+ ARG_SKIP,
+ ARG_BIT_RATE,
+ /* FILL ME */
+};
+
+static GstPadTemplate *sink_temp, *src_temp;
+
+static void gst_mp3parse_class_init (GstMPEGAudioParseClass *klass);
+static void gst_mp3parse_init (GstMPEGAudioParse *mp3parse);
+
+static void gst_mp3parse_loop (GstElement *element);
+static void gst_mp3parse_chain (GstPad *pad,GstBuffer *buf);
+static long bpf_from_header (GstMPEGAudioParse *parse, unsigned long header);
+static int head_check (unsigned long head);
+
+static void gst_mp3parse_set_property (GObject *object, guint prop_id, const GValue *value, GParamSpec *pspec);
+static void gst_mp3parse_get_property (GObject *object, guint prop_id, GValue *value, GParamSpec *pspec);
+
+static GstElementClass *parent_class = NULL;
+//static guint gst_mp3parse_signals[LAST_SIGNAL] = { 0 };
+
+GType
+mp3parse_get_type(void) {
+  /* Standard GObject boilerplate: register GstMPEGAudioParse once and
+   * cache the GType for all later calls. */
+  static GType mp3parse_type = 0;
+
+  if (!mp3parse_type) {
+    static const GTypeInfo mp3parse_info = {
+      sizeof(GstMPEGAudioParseClass),      NULL,
+      NULL,
+      (GClassInitFunc)gst_mp3parse_class_init,
+      NULL,
+      NULL,
+      sizeof(GstMPEGAudioParse),
+      0,
+      (GInstanceInitFunc)gst_mp3parse_init,
+    };
+    mp3parse_type = g_type_register_static(GST_TYPE_ELEMENT, "GstMPEGAudioParse", &mp3parse_info, 0);
+  }
+  return mp3parse_type;
+}
+
+static void
+gst_mp3parse_class_init (GstMPEGAudioParseClass *klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass*)klass;
+  gstelement_class = (GstElementClass*)klass;
+
+  /* "skip" (read/write): number of parsed frames to drop before pushing;
+   * "bit_rate" (read-only): last decoded bitrate, reported in bps by
+   * the getter. */
+  g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_SKIP,
+    g_param_spec_int("skip","skip","skip",
+                     G_MININT,G_MAXINT,0,G_PARAM_READWRITE)); // CHECKME
+  g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_BIT_RATE,
+    g_param_spec_int("bit_rate","bit_rate","bit_rate",
+                     G_MININT,G_MAXINT,0,G_PARAM_READABLE)); // CHECKME
+
+  parent_class = g_type_class_ref(GST_TYPE_ELEMENT);
+
+  gobject_class->set_property = gst_mp3parse_set_property;
+  gobject_class->get_property = gst_mp3parse_get_property;
+}
+
+static void
+gst_mp3parse_init (GstMPEGAudioParse *mp3parse)
+{
+  /* sink pad: unframed mpeg audio comes in here */
+  mp3parse->sinkpad = gst_pad_new_from_template(sink_temp, "sink");
+  gst_pad_set_caps(mp3parse->sinkpad, gst_pad_get_padtemplate_caps (mp3parse->sinkpad));
+  gst_element_add_pad(GST_ELEMENT(mp3parse),mp3parse->sinkpad);
+// gst_pad_set_type_id(mp3parse->sinkpad, mp3type);
+
+#if 1 // set this to one to use the old chaining code
+  gst_pad_set_chain_function(mp3parse->sinkpad,gst_mp3parse_chain);
+#else // else you get the new loop-based code, which isn't complete yet
+  gst_element_set_loop_function (GST_ELEMENT(mp3parse),gst_mp3parse_loop);
+#endif
+
+  /* src pad: one buffer per parsed frame goes out here */
+  mp3parse->srcpad = gst_pad_new_from_template(src_temp, "src");
+  gst_element_add_pad(GST_ELEMENT(mp3parse),mp3parse->srcpad);
+  //gst_pad_set_type_id(mp3parse->srcpad, mp3frametype);
+
+  /* no leftover data, no frames to skip, not recovering from a seek */
+  mp3parse->partialbuf = NULL;
+  mp3parse->skip = 0;
+  mp3parse->in_flush = FALSE;
+}
+
+/* Scan buf[start..len-5] for a plausible frame sync: a 0xff byte
+ * followed by a byte whose top nibble is 0xf.  Returns the offset of
+ * the 0xff byte, or (guint32)-1 when no sync is found (note the cast:
+ * the return type is unsigned, so callers must compare against -1
+ * explicitly).
+ * NOTE(review): if len < 4 the unsigned expression `len - 4` wraps
+ * around and the loop scans far past the buffer -- confirm callers
+ * guarantee len >= 4.  The fprintf looks like leftover debug output. */
+static guint32
+gst_mp3parse_next_header (guchar *buf,guint32 len,guint32 start)
+{
+  guint32 offset = start;
+  int f = 0;
+
+  while (offset < (len - 4)) {
+    fprintf(stderr,"%02x ",buf[offset]);
+    if (buf[offset] == 0xff)
+      f = 1;
+    else if (f && ((buf[offset] >> 4) == 0x0f))
+      return offset - 1;
+    else
+      f = 0;
+    offset++;
+  }
+  return -1;
+}
+
+/* Experimental loop-based driver (not enabled -- see the #if block in
+ * gst_mp3parse_init).  Pulls buffers from the sink pad and pushes one
+ * sub-buffer per detected frame.
+ * NOTE(review): visibly incomplete -- inbuf is never unreffed, a frame
+ * that spans two input buffers is silently dropped instead of carried
+ * over, and the (guint32)-1 failure return of
+ * gst_mp3parse_next_header is never checked before indexing with it.
+ * The fprintf calls are leftover debug output. */
+static void
+gst_mp3parse_loop (GstElement *element)
+{
+  GstMPEGAudioParse *parse = GST_MP3PARSE(element);
+  GstBuffer *inbuf, *outbuf;
+  guint32 size, offset;
+  guchar *data;
+  guint32 start;
+  guint32 header;
+  gint bpf;
+
+  while (1) {
+    // get a new buffer
+    inbuf = gst_pad_pull (parse->sinkpad);
+    size = GST_BUFFER_SIZE (inbuf);
+    data = GST_BUFFER_DATA (inbuf);
+    offset = 0;
+fprintf(stderr, "have buffer of %d bytes\n",size);
+
+    // loop through it and find all the frames
+    while (offset < (size - 4)) {
+      start = gst_mp3parse_next_header (data,size,offset);
+fprintf(stderr, "skipped %d bytes searching for the next header\n",start-offset);
+      header = GULONG_FROM_BE(*((guint32 *)(data+start)));
+fprintf(stderr, "header is 0x%08x\n",header);
+
+      // figure out how big the frame is supposed to be
+      bpf = bpf_from_header (parse, header);
+
+      // see if there are enough bytes in this buffer for the whole frame
+      if ((start + bpf) <= size) {
+        outbuf = gst_buffer_create_sub (inbuf,start,bpf);
+fprintf(stderr, "sending buffer of %d bytes\n",bpf);
+        gst_pad_push (parse->srcpad, outbuf);
+        offset = start + bpf;
+
+      // if not, we have to deal with it somehow
+      } else {
+fprintf(stderr,"don't have enough data for this frame\n");
+
+        break;
+      }
+    }
+  }
+}
+
+/* Chain function: accumulate incoming buffers in partialbuf, scan for
+ * valid frame headers and push one sub-buffer per MPEG audio frame
+ * downstream.  Bytes that do not yet form a complete frame are kept in
+ * mp3parse->partialbuf until the next buffer arrives.
+ * Fix: the sync-scan loop used to test data[offset] BEFORE checking
+ * offset < size, reading one byte past the end of the buffer; the
+ * condition order is now corrected. */
+static void
+gst_mp3parse_chain (GstPad *pad, GstBuffer *buf)
+{
+  GstMPEGAudioParse *mp3parse;
+  guchar *data;
+  glong size,offset = 0;
+  unsigned long header;
+  int bpf;		/* byte length of the current frame */
+  GstBuffer *outbuf;
+  guint64 last_ts;
+
+  g_return_if_fail(pad != NULL);
+  g_return_if_fail(GST_IS_PAD(pad));
+  g_return_if_fail(buf != NULL);
+// g_return_if_fail(GST_IS_BUFFER(buf));
+
+  mp3parse = GST_MP3PARSE (gst_pad_get_parent (pad));
+
+  GST_DEBUG (0,"mp3parse: received buffer of %d bytes\n",GST_BUFFER_SIZE(buf));
+
+  last_ts = GST_BUFFER_TIMESTAMP(buf);
+
+  /* a flush (seek) discards any half-assembled frame */
+  if (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLUSH)) {
+    if (mp3parse->partialbuf) {
+      gst_buffer_unref(mp3parse->partialbuf);
+      mp3parse->partialbuf = NULL;
+    }
+    mp3parse->in_flush = TRUE;
+  }
+
+  // if we have something left from the previous frame
+  if (mp3parse->partialbuf) {
+
+    mp3parse->partialbuf = gst_buffer_append(mp3parse->partialbuf, buf);
+    // and the one we received..
+    gst_buffer_unref(buf);
+  }
+  else {
+    mp3parse->partialbuf = buf;
+  }
+
+  size = GST_BUFFER_SIZE(mp3parse->partialbuf);
+  data = GST_BUFFER_DATA(mp3parse->partialbuf);
+
+  // while we still have bytes left -4 for the header
+  while (offset < size-4) {
+    int skipped = 0;
+
+    GST_DEBUG (0,"mp3parse: offset %ld, size %ld \n",offset, size);
+
+    // search for a possible start byte (bounds check FIRST to avoid
+    // reading data[size])
+    for (;((offset < size) && (data[offset] != 0xff));offset++) skipped++;
+    if (skipped && !mp3parse->in_flush) {
+      GST_DEBUG (0,"mp3parse: **** now at %ld skipped %d bytes\n",offset,skipped);
+    }
+    // construct the header word
+    // NOTE(review): if the scan stopped at offset > size-4 this still
+    // reads up to 3 bytes past the end -- confirm or clamp.
+    header = GULONG_FROM_BE(*((gulong *)(data+offset)));
+    // if it's a valid header, go ahead and send off the frame
+    if (head_check(header)) {
+      // calculate the bpf of the frame
+      bpf = bpf_from_header(mp3parse, header);
+
+      /********************************************************************************
+      * robust seek support
+      * - This performs additional frame validation if the in_flush flag is set
+      *   (indicating a discontinuous stream).
+      * - The current frame header is not accepted as valid unless the NEXT frame
+      *   header has the same values for most fields.  This significantly increases
+      *   the probability that we aren't processing random data.
+      * - It is not clear if this is sufficient for robust seeking of Layer III
+      *   streams which utilize the concept of a "bit reservoir" by borrowing bitrate
+      *   from previous frames.  In this case, seeking may be more complicated because
+      *   the frames are not independently coded.
+      ********************************************************************************/
+      if ( mp3parse->in_flush ) {
+        unsigned long header2;
+
+        if ((size-offset)<(bpf+4)) { if (mp3parse->in_flush) break; } // wait until we have the entire current frame as well as the next frame header
+
+        header2 = GULONG_FROM_BE(*((gulong *)(data+offset+bpf)));
+        GST_DEBUG(0,"mp3parse: header=%08lX, header2=%08lX, bpf=%d\n", header, header2, bpf );
+
+        #define HDRMASK ~( (0xF<<12)/*bitrate*/ | (1<<9)/*padding*/ | (3<<4)/*mode extension*/ ) // mask the bits which are allowed to differ between frames
+
+        if ( (header2&HDRMASK) != (header&HDRMASK) ) { // require 2 matching headers in a row
+          GST_DEBUG(0,"mp3parse: next header doesn't match (header=%08lX, header2=%08lX, bpf=%d)\n", header, header2, bpf );
+          offset++; // This frame is invalid.  Start looking for a valid frame at the next position in the stream
+          continue;
+        }
+
+      }
+
+      // if we don't have the whole frame...
+      if ((size - offset) < bpf) {
+        GST_DEBUG (0,"mp3parse: partial buffer needed %ld < %d \n",(size-offset), bpf);
+        break;
+      } else {
+
+        outbuf = gst_buffer_create_sub(mp3parse->partialbuf,offset,bpf);
+
+        offset += bpf;
+        if (mp3parse->skip == 0) {
+          GST_DEBUG (0,"mp3parse: pushing buffer of %d bytes\n",GST_BUFFER_SIZE(outbuf));
+          if (mp3parse->in_flush) {
+            GST_BUFFER_FLAG_SET(outbuf, GST_BUFFER_FLUSH);
+            mp3parse->in_flush = FALSE;
+          }
+          else {
+            GST_BUFFER_FLAG_UNSET(outbuf, GST_BUFFER_FLUSH);
+          }
+          GST_BUFFER_TIMESTAMP(outbuf) = last_ts;
+          gst_pad_push(mp3parse->srcpad,outbuf);
+        }
+        else {
+          GST_DEBUG (0,"mp3parse: skipping buffer of %d bytes\n",GST_BUFFER_SIZE(outbuf));
+          gst_buffer_unref(outbuf);
+          mp3parse->skip--;
+        }
+      }
+    } else {
+      offset++;
+      if (!mp3parse->in_flush) GST_DEBUG (0,"mp3parse: *** wrong header, skipping byte (FIXME?)\n");
+    }
+  }
+  // if we have processed this block and there are still
+  // bytes left not in a partial block, copy them over.
+  if (size-offset > 0) {
+    glong remainder = (size - offset);
+    GST_DEBUG (0,"mp3parse: partial buffer needed %ld for trailing bytes\n",remainder);
+
+    outbuf = gst_buffer_create_sub(mp3parse->partialbuf,offset,remainder);
+    gst_buffer_unref(mp3parse->partialbuf);
+    mp3parse->partialbuf = outbuf;
+  }
+  else {
+    gst_buffer_unref(mp3parse->partialbuf);
+    mp3parse->partialbuf = NULL;
+  }
+}
+
+/* Bitrate table in kbit/s, indexed [lsf][layer - 1][bitrate_index];
+ * lsf==0 selects the MPEG-1 rates, lsf==1 the MPEG-2/2.5 rates. */
+static int mp3parse_tabsel[2][3][16] =
+{ { {0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, },
+    {0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, },
+    {0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, } },
+  { {0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, },
+    {0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, },
+    {0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, } },
+};
+
+/* Sampling rates in Hz: MPEG-1 at indices 0-2, MPEG-2 at 3-5,
+ * MPEG-2.5 at 6-8. */
+static long mp3parse_freqs[9] =
+{44100, 48000, 32000, 22050, 24000, 16000, 11025, 12000, 8000};
+
+
+/* Compute the byte length of the frame whose 32-bit header is `header`.
+ * Side effect: stores the decoded bitrate (kbit/s) in parse->bit_rate.
+ * Callers must have validated the header with head_check() first,
+ * otherwise the table lookups may hit reserved index values.
+ * Fix: the samplerate lookup always indexed the MPEG-1 third of the
+ * nine-entry mp3parse_freqs table; it now applies the per-version
+ * offset the table was built for (MPEG-1: 0, MPEG-2: 3, MPEG-2.5: 6),
+ * so MPEG-2/2.5 streams get correct frame sizes.  The MPEG-1 path is
+ * unchanged. */
+static long
+bpf_from_header (GstMPEGAudioParse *parse, unsigned long header)
+{
+  int mpegver,layer_index,layer,lsf,samplerate_off,samplerate_index,padding;
+  long bpf;
+
+  mpegver = (header >> 19) & 0x3;	/* 3=MPEG1, 2=MPEG2, 0=MPEG2.5 */
+  layer_index = (header >> 17) & 0x3;
+  layer = 4 - layer_index;
+  lsf = (mpegver == 3) ? 0 : 1;		/* low-sampling-frequency ext. */
+  parse->bit_rate = mp3parse_tabsel[lsf][layer - 1][((header >> 12) & 0xf)];
+  samplerate_off = (mpegver == 3) ? 0 : ((mpegver == 2) ? 3 : 6);
+  samplerate_index = (header >> 10) & 0x3;
+  padding = (header >> 9) & 0x1;
+
+  if (layer == 1) {
+    bpf = parse->bit_rate * 12000;
+    bpf /= mp3parse_freqs[samplerate_off + samplerate_index];
+    bpf = ((bpf + padding) << 2);
+  } else {
+    /* NOTE(review): MPEG-2/2.5 Layer III frames are 72*bitrate/freq,
+     * not 144 -- confirm whether lsf should halve this factor. */
+    bpf = parse->bit_rate * 144000;
+    bpf /= mp3parse_freqs[samplerate_off + samplerate_index];
+    bpf += padding;
+  }
+
+  return bpf;
+}
+
+/* Validate a 32-bit (host-order, already byte-swapped) MPEG audio frame
+ * header.  Returns TRUE only when the sync, version, layer, bitrate and
+ * samplerate fields all hold legal values. */
+static gboolean
+head_check (unsigned long head)
+{
+  GST_DEBUG (0,"checking mp3 header 0x%08lx\n",head);
+  /* if it's not a valid sync */
+  if ((head & 0xffe00000) != 0xffe00000) {
+    GST_DEBUG (0,"invalid sync\n");return FALSE; }
+  /* if it's an invalid MPEG version */
+  if (((head >> 19) & 3) == 0x1) {
+    GST_DEBUG (0,"invalid MPEG version\n");return FALSE; }
+  /* if it's an invalid layer */
+  if (!((head >> 17) & 3)) {
+    GST_DEBUG (0,"invalid layer\n");return FALSE; }
+  /* if it's an invalid bitrate (0 = free format, 0xf = reserved) */
+  if (((head >> 12) & 0xf) == 0x0) {
+    GST_DEBUG (0,"invalid bitrate\n");return FALSE; }
+  if (((head >> 12) & 0xf) == 0xf) {
+    GST_DEBUG (0,"invalid bitrate\n");return FALSE; }
+  /* if it's an invalid samplerate */
+  if (((head >> 10) & 0x3) == 0x3) {
+    GST_DEBUG (0,"invalid samplerate\n");return FALSE; }
+  if ((head & 0xffff0000) == 0xfffe0000) {
+    GST_DEBUG (0,"invalid sync\n");return FALSE; }
+  /* NOTE(review): this tests bit 1 of the emphasis field, rejecting
+   * emphasis 2 (reserved) but also 3 (CCITT J.17, which is legal) --
+   * confirm rejecting J.17 emphasis is intended. */
+  if (head & 0x00000002) {
+    GST_DEBUG (0,"invalid emphasis\n");return FALSE; }
+
+  return TRUE;
+}
+
+/* GObject property setter; only "skip" is writable.  Unknown property
+ * ids are now reported via the standard GObject warning, consistent
+ * with gst_mp3parse_get_property (previously they were silently
+ * ignored). */
+static void
+gst_mp3parse_set_property (GObject *object, guint prop_id, const GValue *value, GParamSpec *pspec)
+{
+  GstMPEGAudioParse *src;
+
+  /* it's not null if we got it, but it might not be ours */
+  g_return_if_fail(GST_IS_MP3PARSE(object));
+  src = GST_MP3PARSE(object);
+
+  switch (prop_id) {
+    case ARG_SKIP:
+      src->skip = g_value_get_int (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter.  "bit_rate" is reported in bits per second
+ * (the parser stores kbit/s internally, hence the *1000). */
+static void
+gst_mp3parse_get_property (GObject *object, guint prop_id, GValue *value, GParamSpec *pspec)
+{
+  GstMPEGAudioParse *src;
+
+  /* it's not null if we got it, but it might not be ours */
+  g_return_if_fail(GST_IS_MP3PARSE(object));
+  src = GST_MP3PARSE(object);
+
+  switch (prop_id) {
+    case ARG_SKIP:
+      g_value_set_int (value, src->skip);
+      break;
+    case ARG_BIT_RATE:
+      g_value_set_int (value, src->bit_rate * 1000);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Plugin entry point: register the mp3parse element factory and the
+ * sink/src pad templates (stored in the file-scope sink_temp/src_temp
+ * so gst_mp3parse_init can reuse them). */
+static gboolean
+plugin_init (GModule *module, GstPlugin *plugin)
+{
+  GstElementFactory *factory;
+
+  /* create an elementfactory for the mp3parse element */
+  factory = gst_elementfactory_new ("mp3parse",
+                                    GST_TYPE_MP3PARSE,
+                                    &mp3parse_details);
+  g_return_val_if_fail (factory != NULL, FALSE);
+
+  sink_temp = mp3_sink_factory ();
+  gst_elementfactory_add_padtemplate (factory, sink_temp);
+
+  src_temp = mp3_src_factory ();
+  gst_elementfactory_add_padtemplate (factory, src_temp);
+
+  gst_plugin_add_feature (plugin, GST_PLUGIN_FEATURE (factory));
+
+  return TRUE;
+}
+
+GstPluginDesc plugin_desc = {
+ GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "mp3parse",
+ plugin_init
+};
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __MP3PARSE_H__
+#define __MP3PARSE_H__
+
+
+#include <config.h>
+#include <gst/gst.h>
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/* Standard GObject cast/check macros for GstMPEGAudioParse.
+ * Fixes: GST_MP3PARSE_CLASS cast to the instance struct instead of
+ * GstMPEGAudioParseClass, and GST_IS_MP3PARSE_CLASS named its
+ * parameter `obj` while the body used `klass`. */
+#define GST_TYPE_MP3PARSE \
+  (gst_mp3parse_get_type())
+#define GST_MP3PARSE(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MP3PARSE,GstMPEGAudioParse))
+#define GST_MP3PARSE_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MP3PARSE,GstMPEGAudioParseClass))
+#define GST_IS_MP3PARSE(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MP3PARSE))
+#define GST_IS_MP3PARSE_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MP3PARSE))
+
+typedef struct _GstMPEGAudioParse GstMPEGAudioParse;
+typedef struct _GstMPEGAudioParseClass GstMPEGAudioParseClass;
+
+struct _GstMPEGAudioParse {
+  GstElement element;
+
+  GstPad *sinkpad,*srcpad;
+
+  GstBuffer *partialbuf;	// previous buffer (if carryover)
+  guint skip;			/* number of frames to skip */
+  guint bit_rate;		/* last decoded bitrate, in kbit/s */
+  gboolean in_flush;		/* TRUE from a flush until the next pushed frame */
+};
+
+struct _GstMPEGAudioParseClass {
+  GstElementClass parent_class;
+};
+
+GType gst_mp3parse_get_type(void);
+
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+
+#endif /* __MP3PARSE_H__ */
--- /dev/null
+Makefile
+Makefile.in
+*.o
+*.lo
+*.la
+.deps
+.libs
--- /dev/null
+filterdir = $(libdir)/gst
+
+filter_LTLIBRARIES = libgstmpegstream.la
+
+libgstmpegstream_la_SOURCES = gstmpegstream.c gstmpegparse.c gstmpegdemux.c gstmpegpacketize.c
+libgstmpegstream_la_CFLAGS = $(GST_CFLAGS) -O3 $(FOMIT_FRAME_POINTER) -ffast-math
+
+noinst_HEADERS = gstmpegparse.h gstmpegdemux.h gstmpegpacketize.h
+
+
+EXTRA_DIST = README notes
--- /dev/null
+MPEG-2 System Stream Parser
+===========================
+
+This element will parse MPEG-2 Program Streams (and eventually Transport
+Streams, though likely as a second element) into its elemental streams.
+
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+/*#define GST_DEBUG_ENABLED*/
+#include <gstmpegdemux.h>
+
+/* elementfactory information */
+static GstElementDetails mpeg_demux_details = {
+ "MPEG System Parser",
+ "Filter/Parser/System",
+ "Demultiplexes MPEG1 and MPEG2 System Streams",
+ VERSION,
+ "Erik Walthinsen <omega@cse.ogi.edu>\n"
+ "Wim Taymans <wim.taymans@chello.be>",
+ "(C) 1999",
+};
+
+/* MPEG2Demux signals and args */
+enum {
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum {
+ ARG_0,
+ ARG_BIT_RATE,
+ ARG_MPEG2,
+ /* FILL ME */
+};
+
+/* Sink: MPEG-1/2 system streams come in here. */
+GST_PADTEMPLATE_FACTORY (sink_factory,
+  "sink",
+  GST_PAD_SINK,
+  GST_PAD_ALWAYS,
+  GST_CAPS_NEW (
+    "mpeg_demux_sink",
+    "video/mpeg",
+      "mpegversion",  GST_PROPS_INT_RANGE (1, 2),
+      "systemstream", GST_PROPS_BOOLEAN (TRUE)
+  )
+);
+
+/* One source pad per MPEG audio stream (ids 0xC0-0xDF). */
+GST_PADTEMPLATE_FACTORY (audio_factory,
+  "audio_[1-32]",
+  GST_PAD_SRC,
+  GST_PAD_SOMETIMES,
+  GST_CAPS_NEW (
+    "mpeg_demux_audio",
+    "audio/mp3",
+    NULL
+  )
+);
+
+/* One source pad per MPEG-1 video elementary stream. */
+GST_PADTEMPLATE_FACTORY (video_mpeg1_factory,
+  "video_[0-15]",
+  GST_PAD_SRC,
+  GST_PAD_SOMETIMES,
+  GST_CAPS_NEW (
+    "mpeg_demux_video_mpeg1",
+    "video/mpeg",
+      "mpegversion", GST_PROPS_INT (1),
+      "systemstream", GST_PROPS_BOOLEAN (FALSE)
+  )
+);
+
+/* One source pad per MPEG-2 video elementary stream. */
+GST_PADTEMPLATE_FACTORY (video_mpeg2_factory,
+  "video_[0-15]",
+  GST_PAD_SRC,
+  GST_PAD_SOMETIMES,
+  GST_CAPS_NEW (
+    "mpeg_demux_video_mpeg2",
+    "video/mpeg",
+      "mpegversion", GST_PROPS_INT (2),
+      "systemstream", GST_PROPS_BOOLEAN (FALSE)
+  )
+);
+
+
+/* private_stream_1 substreams (stream id 0xBD): AC-3 audio. */
+GST_PADTEMPLATE_FACTORY (private1_factory,
+  "private_stream_1.[0-7]",
+  GST_PAD_SRC,
+  GST_PAD_SOMETIMES,
+  GST_CAPS_NEW (
+    "mpeg_demux_private1",
+    "audio/a52",
+    NULL
+  )
+);
+
+/* private_stream_2 (stream id 0xBF): payload type unknown. */
+GST_PADTEMPLATE_FACTORY (private2_factory,
+  "private_stream_2",
+  GST_PAD_SRC,
+  GST_PAD_SOMETIMES,
+  GST_CAPS_NEW (
+    "mpeg_demux_private2",
+    "unknown/unknown",
+    NULL
+  )
+);
+
+/* DVD-style subpicture/subtitle substreams. */
+GST_PADTEMPLATE_FACTORY (subtitle_factory,
+  "subtitle_stream_[0-15]",
+  GST_PAD_SRC,
+  GST_PAD_SOMETIMES,
+  GST_CAPS_NEW (
+    "mpeg_demux_subtitle",
+    "video/mpeg",
+    NULL
+  )
+);
+
+static void gst_mpeg_demux_class_init (GstMPEGDemuxClass *klass);
+static void gst_mpeg_demux_init (GstMPEGDemux *mpeg_demux);
+
+static gboolean gst_mpeg_demux_parse_packhead (GstMPEGParse *mpeg_parse, GstBuffer *buffer);
+static gboolean gst_mpeg_demux_parse_syshead (GstMPEGParse *mpeg_parse, GstBuffer *buffer);
+static gboolean gst_mpeg_demux_parse_packet (GstMPEGParse *mpeg_parse, GstBuffer *buffer);
+static gboolean gst_mpeg_demux_parse_pes (GstMPEGParse *mpeg_parse, GstBuffer *buffer);
+static void gst_mpeg_demux_send_data (GstMPEGParse *mpeg_parse, GstData *data);
+
+static GstMPEGParseClass *parent_class = NULL;
+/*static guint gst_mpeg_demux_signals[LAST_SIGNAL] = { 0 };*/
+
+GType
+mpeg_demux_get_type (void)
+{
+  /* Standard GObject boilerplate: register GstMPEGDemux (derived from
+   * GstMPEGParse) once and cache the GType. */
+  static GType mpeg_demux_type = 0;
+
+  if (!mpeg_demux_type) {
+    static const GTypeInfo mpeg_demux_info = {
+      sizeof(GstMPEGDemuxClass),
+      NULL,
+      NULL,
+      (GClassInitFunc)gst_mpeg_demux_class_init,
+      NULL,
+      NULL,
+      sizeof(GstMPEGDemux),
+      0,
+      (GInstanceInitFunc)gst_mpeg_demux_init,
+    };
+    mpeg_demux_type = g_type_register_static(GST_TYPE_MPEG_PARSE, "GstMPEGDemux", &mpeg_demux_info, 0);
+  }
+  return mpeg_demux_type;
+}
+
+static void
+gst_mpeg_demux_class_init (GstMPEGDemuxClass *klass)
+{
+  GstMPEGParseClass *mpeg_parse_class;
+
+  parent_class = g_type_class_ref (GST_TYPE_MPEG_PARSE);
+
+  mpeg_parse_class = (GstMPEGParseClass *) klass;
+
+  /* override the base parser's hooks so the demuxer gets a look at
+   * every pack header, system header, packet and PES packet */
+  mpeg_parse_class->parse_packhead = gst_mpeg_demux_parse_packhead;
+  mpeg_parse_class->parse_syshead  = gst_mpeg_demux_parse_syshead;
+  mpeg_parse_class->parse_packet   = gst_mpeg_demux_parse_packet;
+  mpeg_parse_class->parse_pes      = gst_mpeg_demux_parse_pes;
+  mpeg_parse_class->send_data      = gst_mpeg_demux_send_data;
+}
+
+static void
+gst_mpeg_demux_init (GstMPEGDemux *mpeg_demux)
+{
+  gint i;
+  GstMPEGParse *mpeg_parse = GST_MPEG_PARSE (mpeg_demux);
+
+  /* replace the base class's sink pad with one built from our own
+   * template, and drop its src pad -- the demuxer creates per-stream
+   * src pads on demand in parse_syshead/parse_packet instead */
+  gst_element_remove_pad (GST_ELEMENT (mpeg_parse), mpeg_parse->sinkpad);
+  mpeg_parse->sinkpad = gst_pad_new_from_template(
+		  GST_PADTEMPLATE_GET (sink_factory), "sink");
+  gst_element_add_pad (GST_ELEMENT (mpeg_parse), mpeg_parse->sinkpad);
+  gst_element_remove_pad (GST_ELEMENT (mpeg_parse), mpeg_parse->srcpad);
+
+  /* zero counters (should be done at RUNNING?)*/
+  mpeg_demux->last_pts = 0;
+
+  /* i think everything is already zero'd, but oh well*/
+  for (i=0;i<NUM_PRIVATE_1_PADS;i++) {
+    mpeg_demux->private_1_pad[i] = NULL;
+    mpeg_demux->private_1_offset[i] = 0;
+  }
+  for (i=0;i<NUM_SUBTITLE_PADS;i++) {
+    mpeg_demux->subtitle_pad[i] = NULL;
+    mpeg_demux->subtitle_offset[i] = 0;
+  }
+  mpeg_demux->private_2_pad = NULL;
+  mpeg_demux->private_2_offset = 0;
+  for (i=0;i<NUM_VIDEO_PADS;i++) {
+    mpeg_demux->video_pad[i] = NULL;
+    mpeg_demux->video_offset[i] = 0;
+  }
+  for (i=0;i<NUM_AUDIO_PADS;i++) {
+    mpeg_demux->audio_pad[i] = NULL;
+    mpeg_demux->audio_offset[i] = 0;
+  }
+
+  GST_FLAG_SET (mpeg_demux, GST_ELEMENT_EVENT_AWARE);
+}
+
+/* send_data hook: buffers that were not claimed by a stream pad are
+ * dropped; events are forwarded via the default pad event handler. */
+static void
+gst_mpeg_demux_send_data (GstMPEGParse *mpeg_parse, GstData *data)
+{
+  if (GST_IS_BUFFER (data)) {
+    gst_buffer_unref (GST_BUFFER (data));
+  }
+  else {
+    GstEvent *event = GST_EVENT (data);
+
+    gst_pad_event_default (mpeg_parse->sinkpad, event);
+  }
+}
+
+/* parse_packhead hook: currently just chains up to the base class and
+ * logs; the pack header payload is not yet used here. */
+static gboolean
+gst_mpeg_demux_parse_packhead (GstMPEGParse *mpeg_parse, GstBuffer *buffer)
+{
+  guint8 *buf;
+
+  parent_class->parse_packhead (mpeg_parse, buffer);
+
+  GST_DEBUG (0, "mpeg_demux: in parse_packhead\n");
+
+  buf = GST_BUFFER_DATA (buffer);
+  /* do something useful here */
+
+  return TRUE;
+}
+
+/* Parse an MPEG-1 system header: walk the stream table and create a
+ * source pad for each stream that does not yet have one, recording the
+ * STD buffer parameters.  Returns FALSE on a malformed header.
+ * (MPEG-2 program streams skip the stream-table walk entirely.) */
+static gboolean
+gst_mpeg_demux_parse_syshead (GstMPEGParse *mpeg_parse, GstBuffer *buffer)
+{
+  GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (mpeg_parse);
+  guint16 header_length;
+  guchar *buf;
+
+  GST_DEBUG (0, "mpeg_demux: in parse_syshead\n");
+
+  buf = GST_BUFFER_DATA (buffer);
+  buf += 4;
+
+  header_length = GUINT16_FROM_BE (*(guint16 *) buf);
+  GST_DEBUG (0, "mpeg_demux: header_length %d\n", header_length);
+  buf += 2;
+
+  /* marker:1==1 ! rate_bound:22 | marker:1==1*/
+  buf += 3;
+
+  /* audio_bound:6==1 ! fixed:1 | constrained:1*/
+  buf += 1;
+
+  /* audio_lock:1 | video_lock:1 | marker:1==1 | video_bound:5 */
+  buf += 1;
+
+  /* apacket_rate_restriction:1 | reserved:7==0x7F */
+  buf += 1;
+
+  if (!GST_MPEG_PARSE_IS_MPEG2 (mpeg_demux)) {
+    gint stream_count = (header_length - 6) / 3;
+    gint i, j=0;
+
+    GST_DEBUG (0, "mpeg_demux::parse_syshead: number of streams=%d \n",
+	       stream_count);
+
+    /* each table entry is 3 bytes: stream id + STD buffer bounds */
+    for (i = 0; i < stream_count; i++) {
+      gint stream_num;
+      guint8 stream_id;
+      gboolean STD_buffer_bound_scale;
+      guint16 STD_buffer_size_bound;
+      guint32 buf_byte_size_bound;
+      gchar *name = NULL;
+      GstPad **outpad = NULL;
+      GstPadTemplate *newtemp = NULL;
+
+      stream_id = *buf++;
+      if (!(stream_id & 0x80)) {
+	GST_DEBUG (0, "mpeg_demux::parse_syshead: error in system header length\n");
+	return FALSE;
+      }
+
+      /* check marker bits */
+      if ((*buf & 0xC0) != 0xC0) {
+	GST_DEBUG (0,
+		   "mpeg_demux::parse_syshead: expecting placeholder bit values '11' after stream id\n");
+	return FALSE;
+      }
+
+      STD_buffer_bound_scale = *buf & 0x20;
+      STD_buffer_size_bound = (*buf++ & 0x1F) << 8;
+      STD_buffer_size_bound |= *buf++;
+
+      if (STD_buffer_bound_scale == 0) {
+	buf_byte_size_bound = STD_buffer_size_bound * 128;
+      }
+      else {
+	buf_byte_size_bound = STD_buffer_size_bound * 1024;
+      }
+
+      /* private_stream_1 */
+      if (stream_id == 0xBD) {
+	name = NULL;
+	outpad = NULL;
+      }
+      /* private_stream_2 */
+      else if (stream_id == 0xBF) {
+	name = g_strdup_printf ("private_stream_2");
+	stream_num = 0;
+	outpad = &mpeg_demux->private_2_pad;
+	newtemp = GST_PADTEMPLATE_GET (private2_factory);
+      }
+      /* Audio */
+      else if ((stream_id >= 0xC0) && (stream_id <= 0xDF)) {
+	name = g_strdup_printf ("audio_%02d", stream_id & 0x1F);
+	stream_num = stream_id & 0x1F;
+	outpad = &mpeg_demux->audio_pad[stream_num];
+	newtemp = GST_PADTEMPLATE_GET (audio_factory);
+      }
+      /* Video */
+      else if ((stream_id >= 0xE0) && (stream_id <= 0xEF)) {
+	name = g_strdup_printf ("video_%02d", stream_id & 0x0F);
+	stream_num = stream_id & 0x0F;
+	outpad = &mpeg_demux->video_pad[stream_num];
+	if (!GST_MPEG_PARSE_IS_MPEG2 (mpeg_demux)) {
+	  newtemp = GST_PADTEMPLATE_GET (video_mpeg1_factory);
+	}
+	else {
+	  newtemp = GST_PADTEMPLATE_GET (video_mpeg2_factory);
+	}
+      }
+
+      GST_DEBUG (0, "mpeg_demux::parse_syshead: stream ID 0x%02X (%s)\n", stream_id, name);
+      GST_DEBUG (0, "mpeg_demux::parse_syshead: STD_buffer_bound_scale %d\n", STD_buffer_bound_scale);
+      GST_DEBUG (0, "mpeg_demux::parse_syshead: STD_buffer_size_bound %d or %d bytes\n",
+		 STD_buffer_size_bound, buf_byte_size_bound);
+
+      /* create the pad and add it to self if it does not yet exist
+       * this should trigger the NEW_PAD signal, which should be caught by
+       * the app and used to attach to desired streams.
+       * NOTE(review): `name` is only freed on the else branch -- confirm
+       * gst_pad_new_from_template takes ownership of the string,
+       * otherwise it leaks here.
+       */
+      if (outpad && *outpad == NULL) {
+	*outpad = gst_pad_new_from_template (newtemp, name);
+	gst_pad_set_caps (*outpad, gst_pad_get_padtemplate_caps (*outpad));
+	gst_element_add_pad (GST_ELEMENT (mpeg_demux), (*outpad));
+      }
+      else {
+	/* we won't be needing this. */
+	if (name)
+	  g_free (name);
+      }
+
+      mpeg_demux->STD_buffer_info[j].stream_id = stream_id;
+      mpeg_demux->STD_buffer_info[j].STD_buffer_bound_scale =
+	STD_buffer_bound_scale;
+      mpeg_demux->STD_buffer_info[j].STD_buffer_size_bound =
+	STD_buffer_size_bound;
+
+      j++;
+    }
+  }
+
+  return TRUE;
+}
+
+/* gst_mpeg_demux_parse_packet:
+ * Parse one MPEG-1 system-stream packet out of @buffer and push its
+ * payload on the output pad matching the stream id.
+ *
+ * @buffer is assumed to hold one complete packet starting at the
+ * 4-byte start code (0x000001xx) followed by the 16-bit packet_length.
+ * The optional header fields (stuffing bytes, STD data, PTS/DTS) are
+ * walked before the payload is cut out as a sub-buffer.
+ *
+ * Returns FALSE on a malformed time-field sequence or when no output
+ * pad exists for the stream id, TRUE otherwise.
+ */
+static gboolean
+gst_mpeg_demux_parse_packet (GstMPEGParse *mpeg_parse, GstBuffer *buffer)
+{
+  GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (mpeg_parse);
+  guint8 id;
+  guint16 headerlen;
+
+  guint16 packet_length;
+  gboolean STD_buffer_bound_scale;
+  guint16 STD_buffer_size_bound;
+  guint64 dts;
+  guint8 ps_id_code;
+
+  guint16 datalen;
+  gulong outoffset = 0; /* wrong XXX FIXME */
+
+  GstPad **outpad = NULL;
+  GstBuffer *outbuf;
+  guint8 *buf, *basebuf;
+
+  GST_DEBUG (0,"mpeg_demux::parse_packet: in parse_packet\n");
+
+  basebuf = buf = GST_BUFFER_DATA (buffer);
+  id = *(buf+3);
+  buf += 4;
+
+  /* start parsing: 16-bit length right after the start code */
+  packet_length = GUINT16_FROM_BE (*((guint16 *)buf));
+
+  GST_DEBUG (0,"mpeg_demux: got packet_length %d\n", packet_length);
+  headerlen = 2;
+  buf += 2;
+
+  /* loop through looking for stuffing bits, STD, PTS, DTS, etc */
+  do {
+    guint8 bits = *buf++;
+
+    switch (bits & 0xC0) {
+      case 0xC0:
+        /* stuffing bytes (should all be 0xff) */
+        if (bits == 0xff) {
+          GST_DEBUG (0,"mpeg_demux::parse_packet: have stuffing byte\n");
+        } else {
+          GST_DEBUG (0,"mpeg_demux::parse_packet: expected stuffing byte\n");
+        }
+        headerlen++;
+        break;
+      case 0x40:
+        /* STD buffer scale/size field (2 bytes total) */
+        GST_DEBUG (0,"mpeg_demux::parse_packet: have STD\n");
+
+        STD_buffer_bound_scale = bits & 0x20;
+        STD_buffer_size_bound = (bits & 0x1F) << 8;
+        STD_buffer_size_bound |= *buf++;
+
+        headerlen += 2;
+        break;
+      case 0x00:
+        switch (bits & 0x30) {
+          case 0x20:
+            /* PTS only: pts:3 ! 1 ! pts:15 ! 1 | pts:15 ! 1 */
+            mpeg_demux->last_pts = (bits & 0x0E) << 29;
+            mpeg_demux->last_pts |= *buf++ << 22;
+            mpeg_demux->last_pts |= (*buf++ & 0xFE) << 14;
+            mpeg_demux->last_pts |= *buf++ << 7;
+            mpeg_demux->last_pts |= (*buf++ & 0xFE) >> 1;
+
+            GST_DEBUG (0,"mpeg_demux::parse_packet: PTS = %llu\n", mpeg_demux->last_pts);
+            headerlen += 5;
+            goto done;
+          case 0x30:
+            /* PTS and DTS: pts:3 ! 1 ! pts:15 ! 1 | pts:15 ! 1 */
+            mpeg_demux->last_pts = (bits & 0x0E) << 29;
+            mpeg_demux->last_pts |= *buf++ << 22;
+            mpeg_demux->last_pts |= (*buf++ & 0xFE) << 14;
+            mpeg_demux->last_pts |= *buf++ << 7;
+            mpeg_demux->last_pts |= (*buf++ & 0xFE) >> 1;
+
+            /* sync:4 ! dts:3 ! 1 ! dts:15 ! 1 | dts:15 ! 1 */
+            dts = (*buf++ & 0x0E) << 29;
+            dts |= *buf++ << 22;
+            dts |= (*buf++ & 0xFE) << 14;
+            dts |= *buf++ << 7;
+            dts |= (*buf++ & 0xFE) >> 1;
+
+            GST_DEBUG (0,"mpeg_demux::parse_packet: PTS = %llu, DTS = %llu\n", mpeg_demux->last_pts, dts);
+            headerlen += 10;
+            goto done;
+          case 0x00:
+            GST_DEBUG (0,"mpeg_demux::parse_packet: have no pts/dts\n");
+            GST_DEBUG (0,"mpeg_demux::parse_packet: got trailer bits %x\n", (bits & 0x0f));
+            if ((bits & 0x0f) != 0xf) {
+              GST_DEBUG (0,"mpeg_demux::parse_packet: not a valid packet time sequence\n");
+              return FALSE;
+            }
+            headerlen++;
+            /* fallthrough */
+          default:
+            goto done;
+        }
+      default:
+        goto done;
+    }
+  } while (1);
+  GST_DEBUG (0,"mpeg_demux::parse_packet: done with header loop\n");
+
+done:
+  /* calculate the amount of real data in this packet
+   * (headerlen started at 2 for the length field itself; the +2
+   * compensates for that -- NOTE(review): verify against spec) */
+  datalen = packet_length - headerlen+2;
+  GST_DEBUG (0,"mpeg_demux::parse_packet: headerlen is %d, datalen is %d\n",
+             headerlen,datalen);
+
+  /* private_stream_1 */
+  if (id == 0xBD) {
+    /* first find the track code */
+    ps_id_code = *(basebuf + headerlen);
+    /* make sure it's valid */
+    if ((ps_id_code >= 0x80) && (ps_id_code <= 0x87)) {
+      GST_DEBUG (0,"mpeg_demux::parse_packet: 0x%02X: we have a private_stream_1 (AC3) packet, track %d\n",
+                 id, ps_id_code - 0x80);
+      outpad = &mpeg_demux->private_1_pad[ps_id_code - 0x80];
+      /* scrap first 4 bytes (so-called "mystery AC3 tag") */
+      headerlen += 4;
+      datalen -= 4;
+    }
+  /* private_stream_2 */
+  } else if (id == 0xBF) {
+    GST_DEBUG (0,"mpeg_demux::parse_packet: 0x%02X: we have a private_stream_2 packet\n", id);
+    outpad = &mpeg_demux->private_2_pad;
+  /* audio */
+  } else if ((id >= 0xC0) && (id <= 0xDF)) {
+    GST_DEBUG (0,"mpeg_demux::parse_packet: 0x%02X: we have an audio packet\n", id);
+    outpad = &mpeg_demux->audio_pad[id & 0x1F];
+    outoffset = mpeg_demux->audio_offset[id & 0x1F];
+    mpeg_demux->audio_offset[id & 0x1F] += datalen;
+  /* video */
+  } else if ((id >= 0xE0) && (id <= 0xEF)) {
+    GST_DEBUG (0,"mpeg_demux::parse_packet: 0x%02X: we have a video packet\n", id);
+    /* video stream numbers are the low 4 bits; was inconsistently
+     * masked with 0x1F for the offsets below (same values for this id
+     * range, but 0x0F matches the pad indexing) */
+    outpad = &mpeg_demux->video_pad[id & 0x0F];
+    outoffset = mpeg_demux->video_offset[id & 0x0F];
+    mpeg_demux->video_offset[id & 0x0F] += datalen;
+  }
+
+  /* if we don't know what it is, bail */
+  if (outpad == NULL) {
+    GST_DEBUG (0,"mpeg_demux::parse_packet: unknown packet id 0x%02X !!\n", id);
+    return FALSE;
+  }
+
+  /* FIXME, this should be done in parse_syshead */
+  if ((*outpad) == NULL) {
+    GST_DEBUG (0,"mpeg_demux::parse_packet: unexpected packet id 0x%02X!!\n", id);
+    return FALSE;
+  }
+
+  /* create the buffer and send it off to the Other Side */
+  if (GST_PAD_CONNECTED(*outpad) && datalen > 0) {
+    /* payload is a zero-copy subbuffer of the parent packet */
+    GST_DEBUG (0,"mpeg_demux::parse_packet: creating subbuffer len %d\n", datalen);
+
+    outbuf = gst_buffer_create_sub (buffer, headerlen+4, datalen);
+
+    GST_BUFFER_OFFSET (outbuf) = outoffset;
+    /* convert 90 kHz PTS ticks to the element clock (x100/9) */
+    GST_BUFFER_TIMESTAMP (outbuf) = (mpeg_demux->last_pts * 100LL)/9LL;
+    GST_DEBUG (0,"mpeg_demux::parse_packet: pushing buffer of len %d id %d, ts %lld\n",
+               datalen, id, GST_BUFFER_TIMESTAMP (outbuf));
+    gst_pad_push ((*outpad),outbuf);
+  }
+
+  return TRUE;
+}
+
+/* gst_mpeg_demux_parse_pes:
+ * Parse one MPEG-2 PES packet out of @buffer, creating the matching
+ * output pad on first sight of a stream, and push the payload there.
+ *
+ * Unlike the MPEG-1 path, pads are created lazily here when a new
+ * stream id (or private-stream substream code) is encountered.
+ * Always returns TRUE except on a malformed PES flag byte.
+ */
+static gboolean
+gst_mpeg_demux_parse_pes (GstMPEGParse *mpeg_parse, GstBuffer *buffer)
+{
+  GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (mpeg_parse);
+  guint8 id;
+
+  guint16 packet_length;
+  guint8 header_data_length = 0;
+
+  guint16 datalen;
+  gulong outoffset = 0; /* wrong XXX FIXME */
+  guint16 headerlen;
+  guint8 ps_id_code = 0x80;
+
+  GstPad **outpad = NULL;
+  GstBuffer *outbuf;
+  GstPadTemplate *newtemp = NULL;
+  guint8 *buf, *basebuf;
+
+  GST_DEBUG (0,"mpeg_demux: in parse_pes\n");
+
+  basebuf = buf = GST_BUFFER_DATA (buffer);
+  id = *(buf+3);
+  buf += 4;
+
+  /* start parsing: 16-bit length right after the start code */
+  packet_length = GUINT16_FROM_BE (*((guint16 *)buf));
+
+  GST_DEBUG (0,"mpeg_demux: got packet_length %d\n", packet_length);
+  buf += 2;
+
+  /* we don't operate on: program_stream_map, padding_stream, */
+  /* private_stream_2, ECM, EMM, or program_stream_directory */
+  if ((id != 0xBC) && (id != 0xBE) && (id != 0xBF) && (id != 0xF0) &&
+      (id != 0xF1) && (id != 0xFF))
+  {
+    guchar flags1 = *buf++;
+    guchar flags2 = *buf++;
+
+    /* top two bits of the first flag byte must be '10' in a PES */
+    if ((flags1 & 0xC0) != 0x80) {
+      return FALSE;
+    }
+
+    header_data_length = *buf++;
+
+    GST_DEBUG (0,"mpeg_demux: header_data_length is %d\n",header_data_length);
+
+    /* check for PTS */
+    if ((flags2 & 0x80)) {
+      /*if ((flags2 & 0x80) && id == 0xe0) { */
+      mpeg_demux->last_pts = (*buf++ & 0x0E) << 29;
+      mpeg_demux->last_pts |= *buf++ << 22;
+      mpeg_demux->last_pts |= (*buf++ & 0xFE) << 14;
+      mpeg_demux->last_pts |= *buf++ << 7;
+      mpeg_demux->last_pts |= (*buf++ & 0xFE) >> 1;
+      GST_DEBUG (0, "mpeg_demux::parse_packet: %x PTS = %llu\n", id, (mpeg_demux->last_pts*1000000LL)/90000LL);
+    }
+    /* DTS present: value itself is skipped, not decoded */
+    if ((flags2 & 0x40)) {
+      GST_DEBUG (0, "mpeg_demux::parse_packet: %x DTS found\n", id);
+      buf += 5;
+    }
+    /* ESCR present: skipped as well */
+    if ((flags2 & 0x20)) {
+      GST_DEBUG (0, "mpeg_demux::parse_packet: %x ESCR found\n", id);
+      buf += 6;
+    }
+    if ((flags2 & 0x10)) {
+      guint32 es_rate;
+
+      /* decoded only for the byte-advance side effect; value unused */
+      es_rate = (*buf++ & 0x07) << 14;
+      es_rate |= (*buf++ ) << 7;
+      es_rate |= (*buf++ & 0xFE) >> 1;
+      GST_DEBUG (0, "mpeg_demux::parse_packet: %x ES Rate found\n", id);
+    }
+    /* FIXME: lots of PES parsing missing here... */
+
+  }
+
+  /* calculate the amount of real data in this PES packet */
+  /* constant is 2 bytes packet_length, 2 bytes of bits, 1 byte header len */
+  headerlen = 5 + header_data_length;
+  /* constant is 2 bytes of bits, 1 byte header len */
+  datalen = packet_length - (3 + header_data_length);
+  GST_DEBUG (0,"mpeg_demux: headerlen is %d, datalen is %d\n",
+             headerlen, datalen);
+
+  /* private_stream_1 */
+  if (id == 0xBD) {
+    /* first find the track code */
+    ps_id_code = *(basebuf + headerlen + 4);
+    /* make sure it's valid */
+    if ((ps_id_code >= 0x80) && (ps_id_code <= 0x87)) {
+      GST_DEBUG (0,"mpeg_demux: we have a private_stream_1 (AC3) packet, track %d\n",
+                 ps_id_code - 0x80);
+      outpad = &mpeg_demux->private_1_pad[ps_id_code - 0x80];
+      /* scrap first 4 bytes (so-called "mystery AC3 tag") */
+      headerlen += 4;
+      datalen -= 4;
+      outoffset = mpeg_demux->private_1_offset[ps_id_code - 0x80];
+      mpeg_demux->private_1_offset[ps_id_code - 0x80] += datalen;
+    }
+    else if ((ps_id_code >= 0x20) && (ps_id_code <= 0x2f)) {
+      GST_DEBUG (0,"mpeg_demux: we have a subtitle_stream packet, track %d\n",
+                 ps_id_code - 0x20);
+      outpad = &mpeg_demux->subtitle_pad[ps_id_code - 0x20];
+      /* skip the substream id byte */
+      headerlen += 1;
+      datalen -= 1;
+      outoffset = mpeg_demux->subtitle_offset[ps_id_code - 0x20];
+      mpeg_demux->subtitle_offset[ps_id_code - 0x20] += datalen;
+    }
+  /* private_stream_2 */
+  } else if (id == 0xBF) {
+    GST_DEBUG (0,"mpeg_demux: we have a private_stream_2 packet\n");
+    outpad = &mpeg_demux->private_2_pad;
+    outoffset = mpeg_demux->private_2_offset;
+    mpeg_demux->private_2_offset += datalen;
+  /* audio */
+  } else if ((id >= 0xC0) && (id <= 0xDF)) {
+    GST_DEBUG (0,"mpeg_demux: we have an audio packet\n");
+    outpad = &mpeg_demux->audio_pad[id - 0xC0];
+    outoffset = mpeg_demux->audio_offset[id & 0x1F];
+    mpeg_demux->audio_offset[id & 0x1F] += datalen;
+  /* video */
+  } else if ((id >= 0xE0) && (id <= 0xEF)) {
+    GST_DEBUG (0,"mpeg_demux: we have a video packet\n");
+    outpad = &mpeg_demux->video_pad[id - 0xE0];
+    outoffset = mpeg_demux->video_offset[id & 0x0F];
+    mpeg_demux->video_offset[id & 0x0F] += datalen;
+  }
+
+  /* if we don't know what it is, bail */
+  if (outpad == NULL)
+    return TRUE;
+
+  /* create the pad and add it if we don't already have one. */
+  /* this should trigger the NEW_PAD signal, which should be caught by */
+  /* the app and used to attach to desired streams. */
+  if ((*outpad) == NULL) {
+    gchar *name = NULL;
+
+    /* we have to name the stream appropriately */
+    if (id == 0xBD) {
+      if (ps_id_code >= 0x80 && ps_id_code <= 0x87) {
+        name = g_strdup_printf("private_stream_1.%d",ps_id_code - 0x80);
+        newtemp = GST_PADTEMPLATE_GET (private1_factory);
+      }
+      else if (ps_id_code >= 0x20 && ps_id_code <= 0x2f) {
+        name = g_strdup_printf("subtitle_stream_%d",ps_id_code - 0x20);
+        newtemp = GST_PADTEMPLATE_GET (subtitle_factory);
+      }
+      else {
+        name = g_strdup_printf("unknown_stream_%d",ps_id_code);
+      }
+    }
+    else if (id == 0xBF) {
+      name = g_strdup ("private_stream_2");
+      newtemp = GST_PADTEMPLATE_GET (private2_factory);
+    }
+    else if ((id >= 0xC0) && (id <= 0xDF)) {
+      name = g_strdup_printf("audio_%02d",id - 0xC0);
+      newtemp = GST_PADTEMPLATE_GET (audio_factory);
+    }
+    else if ((id >= 0xE0) && (id <= 0xEF)) {
+      name = g_strdup_printf("video_%02d",id - 0xE0);
+      newtemp = GST_PADTEMPLATE_GET (video_mpeg2_factory);
+    }
+    else {
+      name = g_strdup_printf("unknown");
+    }
+
+    if (newtemp) {
+      /* create the pad and add it to self */
+      (*outpad) = gst_pad_new_from_template (newtemp, name);
+      gst_pad_set_caps((*outpad), gst_pad_get_padtemplate_caps (*outpad));
+      gst_element_add_pad(GST_ELEMENT(mpeg_demux),(*outpad));
+    }
+    else {
+      g_warning ("mpeg_demux: cannot create pad %s, no template for %02x", name, id);
+    }
+    if (name)
+      g_free (name);
+  }
+
+  /* create the buffer and send it off to the Other Side */
+  if (GST_PAD_CONNECTED(*outpad)) {
+    /* payload is a zero-copy subbuffer of the parent packet */
+    GST_DEBUG (0,"mpeg_demux: creating subbuffer len %d\n", datalen);
+
+    outbuf = gst_buffer_create_sub (buffer, headerlen+4, datalen);
+    GST_BUFFER_OFFSET(outbuf) = outoffset;
+    /* convert 90 kHz PTS ticks to the element clock (x100/9) */
+    GST_BUFFER_TIMESTAMP(outbuf) = (mpeg_demux->last_pts*100LL)/9LL;
+
+    gst_pad_push((*outpad),outbuf);
+  }
+
+  return TRUE;
+}
+
+/* GFunc helper: push a discontinuity event on one pad. */
+static void
+_queue_discontinuous (GstPad *pad, gpointer ign)
+{
+  GstEvent *event = gst_event_new (GST_EVENT_DISCONTINUOUS);
+
+  /* events travel the buffer path in this API, hence the ugly cast */
+  gst_pad_push (pad, (GstBuffer *) event);
+}
+
+/* GFunc helper: push an end-of-stream event on one pad. */
+static void
+_queue_eos (GstPad *pad, gpointer ign)
+{
+  GstEvent *event = gst_event_new (GST_EVENT_EOS);
+
+  /* events travel the buffer path in this API, hence the ugly cast */
+  gst_pad_push (pad, (GstBuffer *) event);
+}
+
+/* Invoke @fun on every existing, connected output pad of the demuxer,
+ * passing @user_data through unchanged.  Pads that were never created
+ * or are not connected are skipped. */
+static void
+_forall_pads (GstMPEGDemux *mpeg_demux, GFunc fun, gpointer user_data)
+{
+  GstPad *cur;
+  gint n;
+
+  /* events should be refcnt'd XXX */
+
+  for (n = 0; n < NUM_PRIVATE_1_PADS; n++) {
+    cur = mpeg_demux->private_1_pad[n];
+    if (cur != NULL && GST_PAD_CONNECTED (cur))
+      (*fun) (cur, user_data);
+  }
+
+  for (n = 0; n < NUM_SUBTITLE_PADS; n++) {
+    cur = mpeg_demux->subtitle_pad[n];
+    if (cur != NULL && GST_PAD_CONNECTED (cur))
+      (*fun) (cur, user_data);
+  }
+
+  cur = mpeg_demux->private_2_pad;
+  if (cur != NULL && GST_PAD_CONNECTED (cur))
+    (*fun) (cur, user_data);
+
+  for (n = 0; n < NUM_VIDEO_PADS; n++) {
+    cur = mpeg_demux->video_pad[n];
+    if (cur != NULL && GST_PAD_CONNECTED (cur))
+      (*fun) (cur, user_data);
+  }
+
+  for (n = 0; n < NUM_AUDIO_PADS; n++) {
+    cur = mpeg_demux->audio_pad[n];
+    if (cur != NULL && GST_PAD_CONNECTED (cur))
+      (*fun) (cur, user_data);
+  }
+}
+
+/* Register the "mpegdemux" element factory together with every pad
+ * template the demuxer may instantiate.  Returns FALSE when the
+ * required bytestream support library cannot be loaded. */
+gboolean
+gst_mpeg_demux_plugin_init (GModule *module, GstPlugin *plugin)
+{
+  GstElementFactory *factory;
+
+  /* the demuxer pulls its input through the bytestream library */
+  if (!gst_library_load ("gstbytestream")) {
+    gst_info ("mpeg_demux:: could not load support library: 'gstbytestream'\n");
+    return FALSE;
+  }
+
+  factory = gst_elementfactory_new ("mpegdemux", GST_TYPE_MPEG_DEMUX,
+                                    &mpeg_demux_details);
+  g_return_val_if_fail (factory != NULL, FALSE);
+
+  /* advertise every pad this element can sprout */
+  gst_elementfactory_add_padtemplate (factory, GST_PADTEMPLATE_GET (sink_factory));
+  gst_elementfactory_add_padtemplate (factory, GST_PADTEMPLATE_GET (audio_factory));
+  gst_elementfactory_add_padtemplate (factory, GST_PADTEMPLATE_GET (video_mpeg1_factory));
+  gst_elementfactory_add_padtemplate (factory, GST_PADTEMPLATE_GET (video_mpeg2_factory));
+  gst_elementfactory_add_padtemplate (factory, GST_PADTEMPLATE_GET (private1_factory));
+  gst_elementfactory_add_padtemplate (factory, GST_PADTEMPLATE_GET (private2_factory));
+  gst_elementfactory_add_padtemplate (factory, GST_PADTEMPLATE_GET (subtitle_factory));
+
+  gst_plugin_add_feature (plugin, GST_PLUGIN_FEATURE (factory));
+
+  return TRUE;
+}
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __MPEG_DEMUX_H__
+#define __MPEG_DEMUX_H__
+
+
+#include <config.h>
+#include <gst/gst.h>
+#include "gstmpegparse.h"
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/* standard GObject cast/check macros.
+ * fixes: get_type name matched the prototype below (was
+ * mpeg_demux_get_type); class cast now targets GstMPEGDemuxClass (was
+ * the instance struct); IS_CLASS parameter renamed to match its use. */
+#define GST_TYPE_MPEG_DEMUX \
+  (gst_mpeg_demux_get_type())
+#define GST_MPEG_DEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MPEG_DEMUX,GstMPEGDemux))
+#define GST_MPEG_DEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MPEG_DEMUX,GstMPEGDemuxClass))
+#define GST_IS_MPEG_DEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MPEG_DEMUX))
+#define GST_IS_MPEG_DEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MPEG_DEMUX))
+
+typedef struct _GstMPEGDemux GstMPEGDemux;
+typedef struct _GstMPEGDemuxClass GstMPEGDemuxClass;
+
+typedef struct _MPEG1Stream MPEG1Stream;
+
+/* Per-stream STD buffer parameters as recorded from the system header
+ * (filled in by parse_syshead). */
+struct _MPEG1Stream {
+  guchar stream_id;               /* raw stream id byte from the syshead */
+  gint8 STD_buffer_bound_scale;   /* STD_buffer_bound_scale flag */
+  gint16 STD_buffer_size_bound;   /* STD_buffer_size_bound, in scaled units */
+};
+
+/* Instance structure of the MPEG program-stream demuxer.
+ * Extends GstMPEGParse with per-stream output pads and byte-offset
+ * counters used to stamp GST_BUFFER_OFFSET on pushed payloads. */
+struct _GstMPEGDemux {
+  GstMPEGParse parent;
+
+  /* current parse state */
+  guchar id;
+
+  /* previous partial chunk and bytes remaining in it */
+  gboolean in_flush;
+
+  /* counters */
+  gulong packs;
+
+  /* pack header values */
+  gboolean have_packhead;
+  guint64 scr_base;       /* system clock reference base */
+  guint16 scr_extension;  /* system clock reference extension */
+  guint32 bit_rate;       /* mux rate -- NOTE(review): units unclear, confirm */
+
+  /* program stream (system) header values */
+  gboolean have_syshead;
+  guint16 header_length;
+  guint32 rate_bound;
+  guint8 audio_bound;
+  gboolean fixed;
+  gboolean constrained;
+  gboolean audio_lock;
+  gboolean video_lock;
+  guint8 video_bound;
+  gboolean packet_rate_restriction;
+  struct _MPEG1Stream STD_buffer_info[48];   /* filled by parse_syshead */
+
+  /* most recent presentation timestamp, in 90 kHz ticks */
+  guint64 last_pts;
+
+#define NUM_PRIVATE_1_PADS 8
+#define NUM_SUBTITLE_PADS 16
+#define NUM_VIDEO_PADS 16
+#define NUM_AUDIO_PADS 32
+
+  /* stream output pads; each pad has a matching running byte offset
+   * used for GST_BUFFER_OFFSET on pushed buffers */
+  GstPad *private_1_pad[NUM_PRIVATE_1_PADS]; /* up to 8 ac3 audio tracks */
+  gulong private_1_offset[NUM_PRIVATE_1_PADS];
+
+  GstPad *subtitle_pad[NUM_SUBTITLE_PADS];
+  gulong subtitle_offset[NUM_SUBTITLE_PADS];
+
+  GstPad *private_2_pad;
+  gulong private_2_offset;
+
+  GstPad *video_pad[NUM_VIDEO_PADS];
+  gulong video_offset[NUM_VIDEO_PADS];
+
+  GstPad *audio_pad[NUM_AUDIO_PADS];
+  gulong audio_offset[NUM_AUDIO_PADS];
+
+};
+
+/* Class structure: adds no virtual methods over GstMPEGParseClass. */
+struct _GstMPEGDemuxClass {
+  GstMPEGParseClass parent_class;
+};
+
+GType gst_mpeg_demux_get_type(void);
+
+/* registers the mpegdemux element factory and its pad templates */
+gboolean gst_mpeg_demux_plugin_init (GModule *module, GstPlugin *plugin);
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+
+#endif /* __MPEG_DEMUX_H__ */
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+/*#define GST_DEBUG_ENABLED */
+#include <gstmpegpacketize.h>
+
+/**
+ * gst_mpeg_packetize_new:
+ * @bs: bytestream to read raw stream data from; not owned by the
+ *      packetizer and must outlive it
+ *
+ * Allocate and initialize a packetizer.  The stream is considered
+ * MPEG-1 until a pack header proves otherwise.
+ *
+ * Returns: a new packetizer to free with gst_mpeg_packetize_destroy(),
+ * or NULL when @bs is NULL.
+ */
+GstMPEGPacketize*
+gst_mpeg_packetize_new (GstByteStream *bs)
+{
+  GstMPEGPacketize *packetize;
+
+  g_return_val_if_fail (bs != NULL, NULL);
+
+  /* g_new is the idiomatic, type-safe GLib allocator; also avoids the
+   * C++ keyword 'new' as a variable name */
+  packetize = g_new (GstMPEGPacketize, 1);
+
+  packetize->id = 0;
+  packetize->bs = bs;
+  packetize->MPEG2 = FALSE;
+
+  return packetize;
+}
+
+/**
+ * gst_mpeg_packetize_destroy:
+ * @packetize: the packetizer to free; its bytestream is NOT released
+ *             (it belongs to the caller)
+ */
+void
+gst_mpeg_packetize_destroy (GstMPEGPacketize *packetize)
+{
+  g_return_if_fail (packetize != NULL);
+
+  /* only our own state is heap-allocated */
+  g_free (packetize);
+}
+
+/* Consume a pack header (start code 0xBA) from the bytestream and
+ * return it as a buffer.  Also detects from the marker bits after the
+ * start code whether the stream is MPEG-2 (whose pack header is two
+ * bytes longer).  Returns NULL when the bytestream cannot deliver
+ * enough data (an event is pending). */
+static GstData*
+parse_packhead (GstMPEGPacketize * packetize)
+{
+  gint length = 8 + 4;
+  guint8 *data;
+  GstBuffer *outbuf;
+
+  GST_DEBUG (0, "packetize: in parse_packhead\n");
+
+  data = gst_bytestream_peek_bytes (packetize->bs, length);
+  if (data == NULL)
+    return NULL;
+  data += 4;
+
+  GST_DEBUG (0, "code %02x\n", *data);
+
+  /* '01xx' in the top nibble marks the MPEG-2 pack header layout */
+  if ((*data & 0xf0) != 0x40) {
+    GST_DEBUG (0, "packetize::parse_packhead setting mpeg1\n");
+    packetize->MPEG2 = FALSE;
+  }
+  else {
+    GST_DEBUG (0, "packetize::parse_packhead setting mpeg2\n");
+    packetize->MPEG2 = TRUE;
+    length += 2;
+    data = gst_bytestream_peek_bytes (packetize->bs, length);
+    if (data == NULL)
+      return NULL;
+  }
+
+  outbuf = gst_bytestream_read (packetize->bs, length);
+  if (outbuf == NULL)
+    return NULL;
+
+  return GST_DATA (outbuf);
+}
+
+/* Consume a generic length-prefixed chunk (system header or PES/packet)
+ * and return it whole, start code and length field included.  Returns
+ * NULL when the bytestream cannot deliver enough data. */
+static inline GstData*
+parse_generic (GstMPEGPacketize *packetize)
+{
+  GstByteStream *bs = packetize->bs;
+  GstBuffer *outbuf;
+  guchar *data;
+  guint16 length;
+
+  GST_DEBUG (0, "packetize: in parse_syshead\n");
+
+  data = gst_bytestream_peek_bytes (bs, 2 + 4);
+  if (data == NULL)
+    return NULL;
+
+  /* the 16-bit big-endian length follows the 4-byte start code */
+  length = GUINT16_FROM_BE (*(guint16 *) (data + 4));
+  GST_DEBUG (0, "packetize: header_length %d\n", length);
+
+  outbuf = gst_bytestream_read (packetize->bs, 2 + length + 4);
+  if (outbuf == NULL)
+    return NULL;
+
+  return GST_DATA (outbuf);
+}
+
+/* FIXME mmx-ify me */
+/* Scan the bytestream for the next MPEG start code (0x000001xx).
+ * On success, packetize->id holds the code's low byte and the stream
+ * has been flushed up to (but not including) the 4 start-code bytes.
+ * Returns FALSE when the bytestream cannot deliver more data, i.e. an
+ * event is pending. */
+static inline gboolean
+find_start_code (GstMPEGPacketize *packetize)
+{
+  GstByteStream *bs = packetize->bs;
+  guchar *buf;
+  gint offset;
+  guint32 code;
+  const gint chunksize = 4096;
+
+  buf = gst_bytestream_peek_bytes (bs, chunksize);
+  if (!buf)
+    return FALSE;
+  /* the first four bytes seed the shift register below */
+  offset = 4;
+
+  code = GUINT32_FROM_BE (*((guint32 *)buf));
+
+  GST_DEBUG (0, "code = %08x\n", code);
+
+  /* shift in one byte at a time until the top 24 bits are 0x000001 */
+  while ((code & 0xffffff00) != 0x100L) {
+    code = (code << 8) | buf[offset++];
+
+    GST_DEBUG (0, " code = %08x\n", code);
+    /* g_print (" code = %08x\n", code); */
+
+    if (offset == chunksize) {
+      /* a whole chunk without a start code: discard it and peek the
+       * next one */
+      if (!gst_bytestream_flush (bs, offset))
+        return FALSE;
+      buf = gst_bytestream_peek_bytes (bs, chunksize);
+      if (!buf)
+        return FALSE;
+      offset = 0;
+    }
+  }
+  packetize->id = code & 0xff;
+  /* flush everything before the start code, leaving the code itself
+   * for the parse functions to re-read */
+  if (offset > 4) {
+    if (!gst_bytestream_flush (bs, offset - 4))
+      return FALSE;
+  }
+  return TRUE;
+}
+
+/* gst_mpeg_packetize_read:
+ * Pull the next complete chunk (pack header, system header, or
+ * packet/PES) from the bytestream.
+ *
+ * Returns a GstBuffer wrapped as GstData on success; when the
+ * bytestream runs dry instead, returns the pending GstEvent (possibly
+ * NULL if the status reports EOS with no event object).  Unknown
+ * MPEG-2 start codes are warned about and skipped, looping for the
+ * next chunk. */
+GstData*
+gst_mpeg_packetize_read (GstMPEGPacketize *packetize)
+{
+  gboolean got_event = FALSE;
+  GstData *outbuf = NULL;
+
+  while (outbuf == NULL) {
+    if (!find_start_code (packetize))
+      got_event = TRUE;
+    else {
+      GST_DEBUG (0, "packetize: have chunk 0x%02X\n",packetize->id);
+      switch (packetize->id) {
+        case 0xBA:
+          /* pack header: also updates the MPEG1/MPEG2 flag */
+          outbuf = parse_packhead (packetize);
+          if (!outbuf)
+            got_event = TRUE;
+          break;
+        case 0xBB:
+          /* system header */
+          outbuf = parse_generic (packetize);
+          if (!outbuf)
+            got_event = TRUE;
+          break;
+        default:
+          /* in MPEG-2, only 0xBD..0xFE are valid PES stream ids */
+          if (packetize->MPEG2 && ((packetize->id < 0xBD) || (packetize->id > 0xFE))) {
+            g_warning ("packetize: ******** unknown id 0x%02X",packetize->id);
+          }
+          else {
+            outbuf = parse_generic (packetize);
+            if (!outbuf)
+              got_event = TRUE;
+          }
+      }
+    }
+
+    if (got_event) {
+      guint32 remaining;
+      GstEvent *event;
+      gint etype;
+
+      /* ask the bytestream why it could not deliver */
+      gst_bytestream_get_status (packetize->bs, &remaining, &event);
+      etype = event? GST_EVENT_TYPE (event) : GST_EVENT_EOS;
+
+      switch (etype) {
+        case GST_EVENT_DISCONTINUOUS:
+          /* drop whatever partial data precedes the discontinuity */
+          gst_bytestream_flush_fast (packetize->bs, remaining);
+          break;
+      }
+
+      return GST_DATA (event);
+    }
+  }
+
+  return outbuf;
+}
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __MPEGPACKETIZE_H__
+#define __MPEGPACKETIZE_H__
+
+
+#include <config.h>
+#include <gst/gst.h>
+#include <libs/bytestream/gstbytestream.h>
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+typedef struct _GstMPEGPacketize GstMPEGPacketize;
+
+/* accessors for the last start code seen and the detected stream type */
+#define GST_MPEG_PACKETIZE_ID(pack) ((pack)->id)
+#define GST_MPEG_PACKETIZE_IS_MPEG2(pack) ((pack)->MPEG2)
+
+
+/* State for splitting an MPEG system stream into whole chunks. */
+struct _GstMPEGPacketize {
+  /* current parse state: low byte of the last start code found */
+  guchar id;
+
+  /* source of raw stream bytes; owned by the caller, not freed here */
+  GstByteStream *bs;
+
+  /* TRUE once a pack header has identified the stream as MPEG-2 */
+  gboolean MPEG2;
+};
+
+GstMPEGPacketize* gst_mpeg_packetize_new (GstByteStream *bs);
+void gst_mpeg_packetize_destroy (GstMPEGPacketize *packetize);
+
+/* returns the next chunk as a buffer, or a pending event */
+GstData* gst_mpeg_packetize_read (GstMPEGPacketize *packetize);
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+
+#endif /* __MPEGPACKETIZE_H__ */
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+/*#define GST_DEBUG_ENABLED*/
+#include "gstmpegparse.h"
+
+/* elementfactory information */
+/* element description published through the factory */
+static GstElementDetails mpeg_parse_details = {
+  "MPEG System Parser",
+  "Filter/Parser/System",
+  "Demultiplexes MPEG1 and MPEG2 System Streams",
+  VERSION,
+  "Erik Walthinsen <omega@cse.ogi.edu>\n"
+  "Wim Taymans <wim.taymans@chello.be>",
+  "(C) 1999",
+};
+
+/* shorthand to reach the (possibly subclassed) class vtable */
+#define CLASS(o) GST_MPEG_PARSE_CLASS (G_OBJECT_GET_CLASS (o))
+
+/* GstMPEGParse signals and args */
+enum {
+  /* FILL ME */
+  LAST_SIGNAL
+};
+
+/* property ids for get_property below */
+enum {
+  ARG_0,
+  ARG_BIT_RATE,
+  ARG_MPEG2,
+  /* FILL ME */
+};
+
+/* NOTE(review): currently unused in the visible code -- confirm */
+typedef enum {
+  PARSE_STATE_ERROR = 0,
+  PARSE_STATE_OK = 1,
+  PARSE_STATE_EVENT = 2,
+} GstMPEGParseState;
+
+GST_PADTEMPLATE_FACTORY (sink_factory,
+ "sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_CAPS_NEW (
+ "mpeg_parse_sink",
+ "video/mpeg",
+ "mpegversion", GST_PROPS_INT_RANGE (1, 2),
+ "systemstream", GST_PROPS_BOOLEAN (TRUE)
+ )
+);
+
+GST_PADTEMPLATE_FACTORY (src_factory,
+ "src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_CAPS_NEW (
+ "mpeg_parse_src",
+ "video/mpeg",
+ "mpegversion", GST_PROPS_INT_RANGE (1, 2),
+ "systemstream", GST_PROPS_BOOLEAN (TRUE),
+ "parsed", GST_PROPS_BOOLEAN (TRUE)
+ )
+);
+
+static void gst_mpeg_parse_class_init (GstMPEGParseClass *klass);
+static void gst_mpeg_parse_init (GstMPEGParse *mpeg_parse);
+
+static gboolean gst_mpeg_parse_parse_packhead (GstMPEGParse *mpeg_parse, GstBuffer *buffer);
+static void gst_mpeg_parse_send_data (GstMPEGParse *mpeg_parse, GstData *data);
+
+static void gst_mpeg_parse_loop (GstElement *element);
+
+static void gst_mpeg_parse_get_property (GObject *object, guint prop_id,
+ GValue *value, GParamSpec *pspec);
+
+static GstElementClass *parent_class = NULL;
+/*static guint gst_mpeg_parse_signals[LAST_SIGNAL] = { 0 };*/
+
+/* Standard GObject boilerplate: register GstMPEGParse exactly once
+ * and hand back the cached type id on every later call. */
+GType
+gst_mpeg_parse_get_type (void)
+{
+  static GType mpeg_parse_type = 0;
+
+  if (mpeg_parse_type == 0) {
+    static const GTypeInfo mpeg_parse_info = {
+      sizeof (GstMPEGParseClass),
+      NULL,
+      NULL,
+      (GClassInitFunc) gst_mpeg_parse_class_init,
+      NULL,
+      NULL,
+      sizeof (GstMPEGParse),
+      0,
+      (GInstanceInitFunc) gst_mpeg_parse_init,
+    };
+
+    mpeg_parse_type = g_type_register_static (GST_TYPE_ELEMENT,
+                                              "GstMPEGParse",
+                                              &mpeg_parse_info, 0);
+  }
+
+  return mpeg_parse_type;
+}
+
+/* Install properties, hook up get_property, and fill in the default
+ * virtual-method table that subclasses (e.g. the demuxer) override. */
+static void
+gst_mpeg_parse_class_init (GstMPEGParseClass *klass)
+{
+  GObjectClass *object_class = (GObjectClass *) klass;
+
+  g_object_class_install_property (object_class, ARG_BIT_RATE,
+      g_param_spec_uint ("bit_rate", "bit_rate", "bit_rate",
+                         0, G_MAXUINT, 0, G_PARAM_READABLE));
+  g_object_class_install_property (object_class, ARG_MPEG2,
+      g_param_spec_boolean ("mpeg2", "mpeg2", "is this an mpeg2 stream",
+                            FALSE, G_PARAM_READABLE));
+
+  object_class->get_property = gst_mpeg_parse_get_property;
+
+  /* default chunk handlers; NULL entries are skipped in the loop */
+  klass->parse_packhead = gst_mpeg_parse_parse_packhead;
+  klass->parse_syshead = NULL;
+  klass->parse_packet = NULL;
+  klass->parse_pes = NULL;
+  klass->send_data = gst_mpeg_parse_send_data;
+
+  parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
+}
+
+/* Instance init: create the always-present sink and source pads,
+ * install the loop function, and reset parser state. */
+static void
+gst_mpeg_parse_init (GstMPEGParse *mpeg_parse)
+{
+  GstElement *element = GST_ELEMENT (mpeg_parse);
+
+  mpeg_parse->sinkpad =
+      gst_pad_new_from_template (GST_PADTEMPLATE_GET (sink_factory), "sink");
+  gst_element_add_pad (element, mpeg_parse->sinkpad);
+  gst_element_set_loop_function (element, gst_mpeg_parse_loop);
+
+  mpeg_parse->srcpad =
+      gst_pad_new_from_template (GST_PADTEMPLATE_GET (src_factory), "src");
+  gst_element_add_pad (element, mpeg_parse->srcpad);
+
+  /* the bytestream and packetizer are created lazily in the loop */
+  mpeg_parse->bs = NULL;
+  mpeg_parse->packetize = NULL;
+  mpeg_parse->next_ts = 0;
+
+  /* zero counters (should be done at RUNNING?) */
+  mpeg_parse->bit_rate = 0;
+
+  GST_FLAG_SET (mpeg_parse, GST_ELEMENT_EVENT_AWARE);
+}
+
+/* Default send_data implementation: forward a chunk (buffer or event)
+ * on the source pad.  Buffers get a timestamp interpolated from the
+ * running clock, which is then advanced by the buffer's duration at
+ * the stream's mux rate. */
+static void
+gst_mpeg_parse_send_data (GstMPEGParse *mpeg_parse, GstData *data)
+{
+  if (GST_IS_EVENT (data)) {
+    /* events travel the buffer path in this API, hence the cast */
+    gst_pad_push (mpeg_parse->srcpad, GST_BUFFER (data));
+  }
+  else {
+    guint64 size = GST_BUFFER_SIZE (data);
+
+    GST_BUFFER_TIMESTAMP (data) = mpeg_parse->next_ts;
+    gst_pad_push (mpeg_parse->srcpad, GST_BUFFER (data));
+
+    /* bit_rate stays 0 until the first pack header has been parsed;
+     * guard against dividing by zero and poisoning next_ts */
+    if (mpeg_parse->bit_rate != 0) {
+      mpeg_parse->next_ts += ((size * 1000000.0) / (mpeg_parse->bit_rate));
+    }
+    GST_DEBUG (0, "mpeg_parse: next_ts %lld\n", mpeg_parse->next_ts);
+
+  }
+}
+
+/* Default pack-header (0xBA) handler: extract the system clock
+ * reference and the mux rate.  The SCR bit layout differs between
+ * MPEG-1 and MPEG-2, hence the two extraction paths.  next_ts is
+ * resynchronized to the SCR (90 kHz ticks scaled by 100/9). */
+static gboolean
+gst_mpeg_parse_parse_packhead (GstMPEGParse *mpeg_parse, GstBuffer *buffer)
+{
+  guint8 *buf;
+  guint64 scr;
+  guint32 scr1, scr2;
+
+  GST_DEBUG (0, "mpeg_parse: in parse_packhead\n");
+
+  buf = GST_BUFFER_DATA (buffer);
+  buf += 4;
+
+  scr1 = GUINT32_FROM_BE (*(guint32*) buf);
+  scr2 = GUINT32_FROM_BE (*(guint32*) (buf+4));
+
+  if (GST_MPEG_PACKETIZE_IS_MPEG2 (mpeg_parse->packetize)) {
+
+    /* :2=01 ! scr:3 ! marker:1==1 ! scr:15 ! marker:1==1 ! scr:15 */
+    scr = (scr1 & 0x38000000) << 3;
+    scr |= (scr1 & 0x03fff800) << 4;
+    scr |= (scr1 & 0x000003ff) << 5;
+    scr |= (scr2 & 0xf8000000) >> 27;
+
+    /* mux_rate sits in the top 22 bits after the SCR */
+    buf += 6;
+    mpeg_parse->bit_rate = (GUINT32_FROM_BE ((*(guint32 *) buf)) & 0xfffffc00) >> 10;
+  }
+  else {
+    /* MPEG-1 layout: 33-bit SCR with interleaved marker bits */
+    scr = (scr1 & 0x0e000000) << 5;
+    scr |= (scr1 & 0x00fffe00) << 6;
+    scr |= (scr1 & 0x000000ff) << 7;
+    scr |= (scr2 & 0xfe000000) >> 25;
+
+    buf += 4;
+    mpeg_parse->bit_rate = (GUINT32_FROM_BE ((*(guint32 *) buf)) & 0x7ffffe00) >> 9;
+  }
+
+  GST_DEBUG (0, "mpeg_parse: SCR is %llu\n", scr);
+  /* resync the interpolated clock to the SCR */
+  mpeg_parse->next_ts = (scr*100)/9;
+  /* mux_rate field is in units of 50 bytes/second */
+  mpeg_parse->bit_rate *= 50;
+
+  GST_DEBUG (0, "mpeg_parse: stream is %1.3fMB/sec\n",
+             (mpeg_parse->bit_rate) / 1000000.0);
+
+  return TRUE;
+}
+
+/* Element loop function: lazily create the bytestream/packetizer on
+ * first iteration, then repeatedly pull chunks and dispatch them to
+ * the class's virtual parse handlers.  Both buffers and events end up
+ * in send_data so subclasses can forward or fan them out. */
+static void
+gst_mpeg_parse_loop (GstElement *element)
+{
+  GstMPEGParse *mpeg_parse = GST_MPEG_PARSE (element);
+
+  /* first run: the sink pad only exists after init, so build here */
+  if (!mpeg_parse->bs) {
+    mpeg_parse->bs = gst_bytestream_new (mpeg_parse->sinkpad);
+    mpeg_parse->packetize = gst_mpeg_packetize_new (mpeg_parse->bs);
+  }
+
+  do {
+    GstData *data;
+    guint id;
+    gboolean mpeg2;
+
+    data = gst_mpeg_packetize_read (mpeg_parse->packetize);
+
+    id = GST_MPEG_PACKETIZE_ID (mpeg_parse->packetize);
+    mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (mpeg_parse->packetize);
+
+    /* events (non-buffers) skip parsing and go straight to send_data */
+    if (GST_IS_BUFFER (data)) {
+      GstBuffer *buffer = GST_BUFFER (data);
+
+      GST_DEBUG (0, "mpeg2demux: have chunk 0x%02X\n", id);
+
+      switch (id) {
+        case 0xba:
+          if (CLASS (mpeg_parse)->parse_packhead)
+            CLASS (mpeg_parse)->parse_packhead (mpeg_parse, buffer);
+          break;
+        case 0xbb:
+          if (CLASS (mpeg_parse)->parse_syshead)
+            CLASS (mpeg_parse)->parse_syshead (mpeg_parse, buffer);
+          break;
+        default:
+          if (mpeg2 && ((id < 0xBD) || (id > 0xFE))) {
+            g_warning ("mpeg2demux: ******** unknown id 0x%02X", id);
+          }
+          else {
+            /* MPEG-2 carries PES packets, MPEG-1 plain packets */
+            if (mpeg2) {
+              if (CLASS (mpeg_parse)->parse_pes)
+                CLASS (mpeg_parse)->parse_pes (mpeg_parse, buffer);
+            }
+            else {
+              if (CLASS (mpeg_parse)->parse_packet)
+                CLASS (mpeg_parse)->parse_packet (mpeg_parse, buffer);
+            }
+          }
+      }
+    }
+
+    if (CLASS (mpeg_parse)->send_data)
+      CLASS (mpeg_parse)->send_data (mpeg_parse, data);
+
+  } while (!GST_ELEMENT_IS_COTHREAD_STOPPING(element));
+}
+
+/* Property getter for bit_rate and mpeg2. */
+static void
+gst_mpeg_parse_get_property (GObject *object, guint prop_id, GValue *value, GParamSpec *pspec)
+{
+  GstMPEGParse *mpeg_parse = GST_MPEG_PARSE (object);
+
+  switch (prop_id) {
+    case ARG_BIT_RATE:
+      g_value_set_uint (value, mpeg_parse->bit_rate);
+      break;
+    case ARG_MPEG2: {
+      /* unknown (reported FALSE) until the packetizer exists */
+      gboolean is_mpeg2 = FALSE;
+
+      if (mpeg_parse->packetize != NULL)
+        is_mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (mpeg_parse->packetize);
+      g_value_set_boolean (value, is_mpeg2);
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+
+/* Register the "mpegparse" element factory and its two pad templates.
+ * Returns FALSE when the bytestream support library is unavailable. */
+gboolean
+gst_mpeg_parse_plugin_init (GModule *module, GstPlugin *plugin)
+{
+  GstElementFactory *factory;
+
+  /* the parser pulls its input through the bytestream library */
+  if (!gst_library_load ("gstbytestream")) {
+    gst_info ("mpeg_parse:: could not load support library: 'gstbytestream'\n");
+    return FALSE;
+  }
+
+  factory = gst_elementfactory_new ("mpegparse", GST_TYPE_MPEG_PARSE,
+                                    &mpeg_parse_details);
+  g_return_val_if_fail (factory != NULL, FALSE);
+
+  gst_elementfactory_add_padtemplate (factory, GST_PADTEMPLATE_GET (src_factory));
+  gst_elementfactory_add_padtemplate (factory, GST_PADTEMPLATE_GET (sink_factory));
+
+  gst_plugin_add_feature (plugin, GST_PLUGIN_FEATURE (factory));
+
+  return TRUE;
+}
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __MPEG_PARSE_H__
+#define __MPEG_PARSE_H__
+
+
+#include <config.h>
+#include <gst/gst.h>
+#include <libs/bytestream/gstbytestream.h>
+#include "gstmpegpacketize.h"
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/* Standard GObject type-cast / type-check boilerplate for GstMPEGParse. */
+#define GST_TYPE_MPEG_PARSE \
+  (gst_mpeg_parse_get_type())
+#define GST_MPEG_PARSE(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MPEG_PARSE,GstMPEGParse))
+#define GST_MPEG_PARSE_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MPEG_PARSE,GstMPEGParseClass))
+#define GST_IS_MPEG_PARSE(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MPEG_PARSE))
+/* FIX: the macro parameter was named 'obj' while the body used 'klass',
+ * so any expansion of GST_IS_MPEG_PARSE_CLASS() referenced an undeclared
+ * identifier (or silently captured an unrelated 'klass' in scope). */
+#define GST_IS_MPEG_PARSE_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MPEG_PARSE))
+
+/* TRUE once the packetizer has identified the stream as MPEG-2 */
+#define GST_MPEG_PARSE_IS_MPEG2(parse) (GST_MPEG_PACKETIZE_IS_MPEG2 (GST_MPEG_PARSE (parse)->packetize))
+
+typedef struct _GstMPEGParse GstMPEGParse;
+typedef struct _GstMPEGParseClass GstMPEGParseClass;
+
+/* Per-instance state for the MPEG system stream parser element. */
+struct _GstMPEGParse {
+ GstElement element;
+
+ /* the element's single input and single output pad */
+ GstPad *sinkpad, *srcpad;
+
+ GstByteStream *bs; /* byte-stream reader over the sink pad */
+ GstMPEGPacketize *packetize; /* splits the stream into system packets */
+
+ /* pack header values */
+ guint32 bit_rate;
+ guint64 next_ts;
+};
+
+/* Class structure: subclasses (e.g. the demuxer) override these hooks
+ * to handle each packet type; any hook left NULL is simply skipped by
+ * the loop function. */
+struct _GstMPEGParseClass {
+ GstElementClass parent_class;
+
+ /* process packet types */
+ gboolean (*parse_packhead) (GstMPEGParse *parse, GstBuffer *buffer);
+ gboolean (*parse_syshead) (GstMPEGParse *parse, GstBuffer *buffer);
+ gboolean (*parse_packet) (GstMPEGParse *parse, GstBuffer *buffer); /* MPEG-1 packets */
+ gboolean (*parse_pes) (GstMPEGParse *parse, GstBuffer *buffer); /* MPEG-2 PES packets */
+
+ /* optional method to send out the data */
+ void (*send_data) (GstMPEGParse *parse, GstData *data);
+};
+
+GType gst_mpeg_parse_get_type(void);
+
+gboolean gst_mpeg_parse_plugin_init (GModule *module, GstPlugin *plugin);
+
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+
+#endif /* __MPEG_PARSE_H__ */
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#include "gstmpegparse.h"
+#include "gstmpegdemux.h"
+
+/* Plugin entry point: register both the parser and the demuxer elements.
+ * FIX: the sub-initializers return gboolean (gst_mpeg_parse_plugin_init
+ * returns FALSE when the gstbytestream library is missing), but their
+ * results were discarded and TRUE returned unconditionally.
+ * NOTE(review): assumes gst_mpeg_demux_plugin_init has the same gboolean
+ * contract as the parse variant — confirm in gstmpegdemux.h. */
+static gboolean
+plugin_init (GModule *module, GstPlugin *plugin)
+{
+  if (!gst_mpeg_parse_plugin_init (module, plugin))
+    return FALSE;
+  if (!gst_mpeg_demux_plugin_init (module, plugin))
+    return FALSE;
+
+  return TRUE;
+}
+
+/* Descriptor picked up by the GStreamer core at load time: the core
+ * version this plugin was built against, its name, and its init hook. */
+GstPluginDesc plugin_desc = {
+ GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "mpegstream",
+ plugin_init
+};
--- /dev/null
+Basic parsing method
+====================
+
+In an MPEG-2 Program Stream, every chunk of data starts with at least 23
+zeros and a one. This is followed by a byte of ID. At any given point I
+can search for the next occurrence of the 3-byte start-code prefix
+(0x000001) to find the next chunk.
+I assume these start codes are aligned on byte boundaries. I might be
+wrong, in which case this thing has to be rewritten at some point in the
+future.
+
+This means there are two basic modes of operation. The first is the
+simple search for the next start code. The second is a continuation mode,
+where data from the previous buffer is available to attempt to complete a
+chunk. Hopefully the majority of time will be spent in the first mode, as
+that is where the most efficiency is, since there's no copying of partial
+chunks.
+
+The parsing is done as a state machine, as long as there's data left in
+the buffer, something is attempted. What is attempted is based on the
+state of the parser (gee, so that's why they call it a state machine <g>).
+The stages are:
+
+1) looking for sync (have_sync == FALSE)
+ a) have some zeros (zeros > 0)
+2) getting ID (have_sync == TRUE, id == 0)
+3) decoding the chunk contents (have_sync == TRUE, id != 0)
+
+Mechanism for handling cross-buffer chunks of data
+==================================================
+
+The problem: if data were to come to the parser in 64-byte chunks, the
+pack head would be split across at least two buffers, possibly three. Up
+front I will make the assumption that no one will be sending buffers of
+less size than the largest chunk (header, PES packet), such that no chunk
+will be split across more than two buffers.
+
+If we take the pack header as an example, when the stream starts out, we
+can assume that it starts at the beginning of the buffer and doesn't
+exceed the bounds of it. However, if we're mid-stream and starting
+another pack, it can be split across two buffers.
--- /dev/null
+Makefile
+Makefile.in
+*.o
+*.lo
+*.la
+.deps
+.libs
--- /dev/null
+filterdir = $(libdir)/gst
+
+filter_LTLIBRARIES = libgstsynaesthesia.la
+
+libgstsynaesthesia_la_SOURCES = core.c gstsynaesthesia.c
+
+noinst_HEADERS = core.h gstsynaesthesia.h
+
+EXTRA_DIST = README README-syna
--- /dev/null
+This is a nifty visualization based on synaesthesia-2.0 (see README-syna).
+I've librarified the program to a fair degree, but don't have all the
+output stuff working yet. I'll be looking at ALSAPlayer's modifications
+to synaesthesia to see what they did to merge it into another
+architecture. It shouldn't be too hard to get this working, I just
+haven't had a need yet.
--- /dev/null
+SYNAESTHESIA v2.0
+
+Introduction
+============
+
+This is a program for representing sounds visually from a CD or line
+input or piped from another program. It goes beyond the usual oscilloscope
+style program by combining an FFT and stereo positioning information to
+give a two dimensional display. Some of the shapes I have observed are:
+ * Drums: clouds of color, fairly high
+ * Clean guitar: several horizontal lines, low down
+ * Rough guitar: a cloud, low down
+ * Trumpet: Lots of horizontal lines everywhere
+ * Flute: A single horizontal line, low down
+ * Voice: A vertical line with some internal structure
+ * Synthesizer: All kinds of weird shapes!
+
+Synaesthesia can run in a window in X or full screen using SVGAlib.
+
+The display represents frequency as vertical position on screen,
+left-right position as left-right position on screen. It can also
+understand surround sound encoded music, and shows ambient noise
+in orange.
+
+X-Windows support was added in version 1.3, as well as a major redesign
+of the interface. You can use Synaesthesia as a fully functional
+CD player, suitable for use while working.
+
+There is command line support for play lists and piping from another
+program (such as an mp3 player).
+
+Usage
+=====
+
+Synaesthesia should work on Linux and BSD systems. (Note: I don't
+have access to a BSD system myself, I have to rely on patches -- if it
+doesn't work, please tell me!) LinuxPPC users may have to use the pipe
+mode rather than taking sound input from the CD player, as I believe
+sound recording is not yet implemented.
+
+Compile Synaesthesia by typing
+
+ make
+
+then install it by typing
+
+ make install
+
+This will create three versions of Synaesthesia:
+
+ synaesthesia - full screen SVGAlib version (Linux only)
+ xsynaesthesia - Version that runs as a window in X
+ sdlsynaesthesia - Version that uses the SDL graphics library
+
+If you want to use the SDL version, you need to get SDL from
+http://www.devolution.com/~slouken/SDL.
+
+You will need to run Synaesthesia as root to run it full screen
+with SVGAlib. Other varieties can be run by any user providing you
+provide permissions on /dev/dsp, /dev/cdrom, and /dev/mixer.
+
+Synaesthesia creates a configuration file, named ~/.synaesthesia,
+to store settings such as brightness, color, and window size, as
+well as which devices to use to control sound input.
+
+BSD users will have to edit this file to set the CD-ROM device name
+before using Synaesthesia in order to control the CD.
+
+Run Synaesthesia with no parameters for further information on how to
+use it.
+
+Notes for code rippers
+======================
+
+This program contains code that you may wish to use in your own projects.
+If you want to, please do. (For example, you might want to add some
+snazzy visual effects to your favourite MP3 or CD player)
+
+The actual code to do the mapping from sound to visual display is
+all in core.cpp, it should be fairly easy to disentangle from other
+parts of the program. It does make reference to some globals defined
+in syna.h, namely the #defines m (log2 of the sample size for each
+frame) and brightness, data (which stores the sound input), outputBmp,
+lastOutputBmp and lastLastOutputBmp (which hold the output), outWidth
+and outHeight (size of the bitmaps), and fadeMode, brightnessTwiddler,
+starSize and pointsAreDiamonds (various parameters affecting the display).
+
+The normal way to use it would be:
+
+ Call coreInit() to set up some look-up tables
+ Call setStarSize(starSize) to set up some more look-up tables
+ Loop
+ Put data into the data array
+ Call fade() to apply the fade/wave/heat effect to the output
+ Call coreGo() to add the next fragment of sound input to the output
+ Display contents of outputBmp to screen
+
+There is a simple anti-aliased polygon drawing engine in the file
+polygon.h. sound.cpp contains code for driving the CD. xlib.c and
+xlibwrap.cpp contain code for setting up a window under X (originally
+ripped from the Xaos fractal viewer program :-) ).
+
+Authors
+=======
+
+This program is free. If you like it, or have any suggestions, please
+send me (Paul Harrison) an email (pfh@yoyo.cc.monash.edu.au).
+
+Thanks to Asger Alstrup Nielsen for many great suggestions, and for
+writing optimized 32 bit loops for fading and drawing to screen.
+
+Thanks to Roger Knobbe for porting Synaesthesia to FreeBSD.
+
+Thanks to Ben Gertzfield and Martin Mitchell for some small fixes to the
+CD controlling code.
+
+Thanks to Simon Budig for an improvement to the X code.
+
+Changes
+=======
+
+1.1 - Added surround sound decoding.
+1.2 - Fixed a bug in the ioctl calls to /dev/dsp.
+1.3 - Asger Alstrup Nielsen's optimizations added.
+ Added X-Windows support.
+ More options, redesigned interface.
+1.4 - Bug fixes, including a great reduction in
+ "Sound: Recording overrun" warnings.
+ New command line options: play lists and piping.
+ Support for SDL.
+2.0 - Bug fixes: Fixed problem in xlib.c that caused occasional segfaults,
+ several endianness problems fixed.
+ New effects: Wave, heat, diamond shaped points.
+ Piping sound no longer requires the twiddle factor.
+ Yet another interface redesign.
+ Partial support for LinuxPPC (pipe mode only)
+
--- /dev/null
+/* Synaesthesia - program to display sound graphically
+ Copyright (C) 1997 Paul Francis Harrison
+
+ This program is free software; you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by the
+ Free Software Foundation; either version 2 of the License, or (at your
+ option) any later version.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 675 Mass Ave, Cambridge, MA 02139, USA.
+
+ The author may be contacted at:
+ pfh@yoyo.cc.monash.edu.au
+ or
+ 27 Bond St., Mt. Waverley, 3149, Melbourne, Australia
+*/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <math.h>
+#include <string.h>
+
+#include <core.h>
+
+/* Reverse the low SYNA_BITS bits of i — maps an FFT output index to its
+ * bit-reversed position (used to fill sp->bitReverse in coreInit()).
+ * FIX: 'static' added — a plain C99 'inline' function provides no
+ * external definition, so a non-inlined call can fail at link time. */
+static inline int bitReverser(int i) {
+  int sum = 0, j;
+
+  for (j = 0; j < SYNA_BITS; j++) {
+    sum = (i & 1) + sum * 2;  /* shift the low bit of i into sum */
+    i >>= 1;
+  }
+  return sum;
+}
+
+/* In-place radix-2 decimation-in-frequency FFT over SYNA_SIZE complex
+ * samples (x = real parts, y = imaginary parts).  Results are left in
+ * bit-reversed order; callers index them through sp->bitReverse[]
+ * (see coreGo()).  Twiddle factors come from the cos/negSin tables
+ * built in coreInit(). */
+static void fft(struct syna_priv *sp,double *x,double *y) {
+ int n2 = SYNA_SIZE, n1;
+ int twoToTheK;
+ int i,j;
+ for(twoToTheK=1;twoToTheK<SYNA_SIZE;twoToTheK*=2) {
+ n1 = n2;
+ n2 /= 2;
+ for(j=0;j<n2;j++) {
+ /* twiddle factor for this butterfly column; '& (SYNA_SIZE-1)'
+ * wraps the table index (SYNA_SIZE is a power of two) */
+ double c = sp->cosTable[j*twoToTheK&(SYNA_SIZE-1)],
+ s = sp->negSinTable[j*twoToTheK&(SYNA_SIZE-1)];
+ for(i=j;i<SYNA_SIZE;i+=n1) {
+ int l = i+n2;
+ /* butterfly: sum stays in place, difference is rotated */
+ double xt = x[i] - x[l];
+ double yt = y[i] - y[l];
+ x[i] = (x[i] + x[l]);
+ y[i] = (y[i] + y[l]);
+ x[l] = xt*c - yt*s;
+ y[l] = xt*s + yt*c;
+ }
+ }
+ }
+}
+
+/* Build the FFT lookup tables, allocate the three output bitmaps and
+ * set default display parameters.
+ * FIX: the bitmaps hold TWO bytes per pixel (addPixel() addresses
+ * output + x*2 + y*outWidth*2, and fadeFade() walks
+ * outWidth*outHeight*2 bytes), so each buffer must be w*h*2 bytes.
+ * They were allocated at only w*h, a 2x heap overrun. */
+void coreInit(struct syna_priv *sp,int w,int h) {
+  gint i;
+  gsize bmpsize = (gsize)w * h * 2;  /* two channels per pixel */
+
+  for (i = 0; i < SYNA_SIZE; i++) {
+    sp->negSinTable[i] = -sin(3.141592*2.0/SYNA_SIZE*i);
+    sp->cosTable[i] = cos(3.141592*2.0/SYNA_SIZE*i);
+    sp->bitReverse[i] = bitReverser(i);
+  }
+
+  sp->outWidth = w;
+  sp->outHeight = h;
+
+  sp->output = g_malloc(bmpsize);
+  sp->lastOutput = g_malloc(bmpsize);
+  sp->lastLastOutput = g_malloc(bmpsize);
+  memset(sp->output,0,bmpsize);
+  memset(sp->lastOutput,0,bmpsize);
+  memset(sp->lastLastOutput,0,bmpsize);
+
+  /* display defaults; the sliders choose the fg/bg palette colors
+   * consumed by setupPalette() */
+  sp->fadeMode = FADE_STARS;
+  sp->pointsAreDiamonds = TRUE;
+  sp->brightnessTwiddler = 0.33;
+  sp->starSize = 0.125;
+  sp->fgRedSlider = 0.0;
+  sp->fgGreenSlider = 0.5;
+  sp->bgRedSlider = 1.0;
+  sp->bgGreenSlider = 0.2;
+}
+
+/* Rebuild the brightness-decay table (scaleDown) used when drawing
+ * stars/diamonds; larger sizes decay more slowly so points spread
+ * further.  Also recomputes the worst-case star radius. */
+void setStarSize(struct syna_priv *sp,gdouble size) {
+  gdouble fudge;
+  gint decay = 0;
+  gint i;
+
+  /* each fade mode tolerates a different base decay */
+  if (sp->fadeMode == FADE_WAVE)
+    fudge = 0.4;
+  else if (sp->fadeMode == FADE_FLAME)
+    fudge = 0.6;
+  else
+    fudge = 0.78;
+
+  if (size > 0.0)
+    decay = (int)(exp(log(fudge) / (size*8.0))*255);
+  if (decay > 255)
+    decay = 255;
+
+  /* scaleDown[b] == b scaled by decay/256 */
+  for (i = 0; i < 256; i++)
+    sp->scaleDown[i] = (i * decay) >> 8;
+
+  /* count decay steps until a fully-bright pixel dies out */
+  sp->maxStarRadius = 1;
+  for (i = 255; i; i = sp->scaleDown[i])
+    sp->maxStarRadius++;
+}
+
+/* Saturating add of (br1,br2) to the two channels of pixel (x,y) in
+ * the output bitmap, silently ignoring out-of-range coordinates.
+ * FIX: 'static' added — plain C99 'inline' emits no external
+ * definition, risking link errors for non-inlined calls. */
+static inline void addPixel(struct syna_priv *sp,int x,int y,int br1,int br2)
+{
+  unsigned char *p;
+
+  if (x < 0 || x >= sp->outWidth || y < 0 || y >= sp->outHeight)
+    return;
+
+  /* two bytes per pixel: channel 0 then channel 1 */
+  p = sp->output+x*2+y*sp->outWidth*2;
+  if (p[0] < 255-br1) p[0] += br1; else p[0] = 255;
+  if (p[1] < 255-br2) p[1] += br2; else p[1] = 255;
+}
+
+/* Saturating add of (br1,br2) to a two-byte pixel via direct pointer;
+ * no bounds checking — callers must guarantee p is inside the bitmap.
+ * FIX: 'static' added for correct C99 inline linkage. */
+static inline void addPixelFast(unsigned char *p,int br1,int br2) {
+  if (p[0] < 255-br1) p[0] += br1; else p[0] = 255;
+  if (p[1] < 255-br2) p[1] += br2; else p[1] = 255;
+}
+
+/* Dim the whole output bitmap, processing four bytes per iteration by
+ * masking nibbles inside an unsigned long.
+ * FIX: the original '*(ptr++) -= ((*ptr & ...) ...)' both modifies and
+ * reads *ptr unsequenced in one expression — undefined behavior in C.
+ * Read the word once into a local, then store the faded value.
+ * NOTE(review): the '*2/4' byte count assumes sizeof(unsigned long)==4;
+ * on LP64 platforms only half the buffer is faded — consider guint32. */
+void fadeFade(struct syna_priv *sp) {
+  unsigned long *ptr = (unsigned long *) sp->output;
+  int i = sp->outWidth*sp->outHeight*2/4;
+
+  do {
+    unsigned long v = *ptr;
+
+    if (v)
+      *ptr = v - (((v & 0xf0f0f0f0ul) >> 4) + ((v & 0xe0e0e0e0ul) >> 5));
+    ptr++;
+  } while (--i > 0);
+}
+
+/* Read one byte of the previous frame (lastOutput), returning 0 for
+ * out-of-range coordinates.  Note the bounds test uses (x,y) while the
+ * read uses the caller-precomputed byte offset 'where'. */
+inline unsigned char getPixel(struct syna_priv *sp,int x,int y,int where) {
+ if (x < 0 || y < 0 || x >= sp->outWidth || y >= sp->outHeight) return 0;
+ return sp->lastOutput[where];
+}
+
+/* Wave-style fade of one edge byte: average the four neighbours (with
+ * bounds checks via getPixel), add the previous frame's value, subtract
+ * the frame before that, and clamp the result to 0..255.  Used only for
+ * the bitmap borders; fadeWave() inlines the same formula without
+ * bounds checks for the interior. */
+inline void fadePixelWave(struct syna_priv *sp,int x,int y,int where,int step) {
+ short j =
+ (short)((getPixel(sp,x-1,y,where-2)+
+ getPixel(sp,x+1,y,where+2)+
+ getPixel(sp,x,y-1,where-step)+
+ getPixel(sp,x,y+1,where+step)) >> 2)
+ + sp->lastOutput[where];
+ if (!j) { sp->output[where] = 0; return; }
+ j = j - sp->lastLastOutput[where] - 1;
+ if (j < 0) sp->output[where] = 0;
+ /* any bit set in the high byte means the sum exceeded 255 */
+ else if (j & (255*256)) sp->output[where] = 255;
+ else sp->output[where] = j;
+}
+
+/* Wave fade: rotate the three frame buffers (oldest becomes the new
+ * output), then compute each output byte from its neighbours in the
+ * previous two frames.  Edges use the bounds-checked fadePixelWave();
+ * the interior repeats the same formula inline for speed. */
+void fadeWave(struct syna_priv *sp) {
+ int x,y,i,j,start,end;
+ int step = sp->outWidth*2; /* bytes per bitmap row */
+ /* rotate buffers: output <- lastLast, last <- output, lastLast <- last */
+ unsigned char *t = sp->lastLastOutput;
+ sp->lastLastOutput = sp->lastOutput;
+ sp->lastOutput = sp->output;
+ sp->output = t;
+
+ /* top and bottom rows (both channels of each pixel) */
+ for(x=0,i=0,j=sp->outWidth*(sp->outHeight-1)*2;x<sp->outWidth;x++,i+=2,j+=2) {
+ fadePixelWave(sp,x,0,i,step);
+ fadePixelWave(sp,x,0,i+1,step);
+ fadePixelWave(sp,x,sp->outHeight-1,j,step);
+ fadePixelWave(sp,x,sp->outHeight-1,j+1,step);
+ }
+
+ /* left and right columns */
+ for(y=1,i=sp->outWidth*2,j=sp->outWidth*4-2;y<sp->outHeight;y++,i+=step,j+=step) {
+ fadePixelWave(sp,0,y,i,step);
+ fadePixelWave(sp,0,y,i+1,step);
+ fadePixelWave(sp,sp->outWidth-1,y,j,step);
+ fadePixelWave(sp,sp->outWidth-1,y,j+1,step);
+ }
+
+ /* interior: neighbour average + last frame - frame before, clamped */
+ for(y=1,
+ start=sp->outWidth*2+2,
+ end=sp->outWidth*4-2; y<sp->outHeight-1; y++,start+=step,end+=step) {
+ int i = start;
+ do {
+ short j =
+ (short)((sp->lastOutput[i-2]+
+ sp->lastOutput[i+2]+
+ sp->lastOutput[i-step]+
+ sp->lastOutput[i+step]) >> 2)
+ + sp->lastOutput[i];
+ if (!j) {
+ sp->output[i] = 0;
+ } else {
+ j = j - sp->lastLastOutput[i] - 1;
+ if (j < 0) sp->output[i] = 0;
+ else if (j & (255*256)) sp->output[i] = 255;
+ else sp->output[i] = j;
+ }
+ } while(++i < end);
+ }
+}
+
+/* Heat-style fade of one edge byte.
+ * NOTE(review): this body is identical to fadePixelWave(), while the
+ * interior loop of fadeHeat() adds an extra heat term
+ * '(lastLastOutput - (lastOutput>>2))'.  Looks like a copy-paste of the
+ * wave version for the edges — confirm against upstream synaesthesia. */
+inline void fadePixelHeat(struct syna_priv *sp,int x,int y,int where,int step) {
+ short j =
+ (short)((getPixel(sp,x-1,y,where-2)+
+ getPixel(sp,x+1,y,where+2)+
+ getPixel(sp,x,y-1,where-step)+
+ getPixel(sp,x,y+1,where+step)) >> 2)
+ + sp->lastOutput[where];
+ if (!j) { sp->output[where] = 0; return; }
+ j = j - sp->lastLastOutput[where] - 1;
+ if (j < 0) sp->output[where] = 0;
+ /* high byte set means the sum overflowed 255 */
+ else if (j & (255*256)) sp->output[where] = 255;
+ else sp->output[where] = j;
+}
+
+/* Flame fade: rotate the three frame buffers, then compute each output
+ * byte from its neighbours with an extra upward "heat" bias.  Edges use
+ * fadePixelHeat(); the interior repeats the formula inline. */
+void fadeHeat(struct syna_priv *sp) {
+ int x,y,i,j,start,end;
+ int step = sp->outWidth*2; /* bytes per bitmap row */
+ /* rotate buffers: output <- lastLast, last <- output, lastLast <- last */
+ unsigned char *t = sp->lastLastOutput;
+ sp->lastLastOutput = sp->lastOutput;
+ sp->lastOutput = sp->output;
+ sp->output = t;
+
+ /* top and bottom rows (both channels of each pixel) */
+ for(x=0,i=0,j=sp->outWidth*(sp->outHeight-1)*2;x<sp->outWidth;x++,i+=2,j+=2) {
+ fadePixelHeat(sp,x,0,i,step);
+ fadePixelHeat(sp,x,0,i+1,step);
+ fadePixelHeat(sp,x,sp->outHeight-1,j,step);
+ fadePixelHeat(sp,x,sp->outHeight-1,j+1,step);
+ }
+
+ /* left and right columns */
+ for(y=1,i=sp->outWidth*2,j=sp->outWidth*4-2;y<sp->outHeight;y++,i+=step,j+=step) {
+ fadePixelHeat(sp,0,y,i,step);
+ fadePixelHeat(sp,0,y,i+1,step);
+ fadePixelHeat(sp,sp->outWidth-1,y,j,step);
+ fadePixelHeat(sp,sp->outWidth-1,y,j+1,step);
+ }
+
+ /* interior: like the wave formula, plus a heat term that feeds a
+ * fraction of the previous value back in */
+ for (y=1,start=sp->outWidth*2+2,
+ end=sp->outWidth*4-2; y<sp->outHeight-1; y++,start+=step,end+=step) {
+ int i = start;
+ do {
+ short j =
+ (short)((sp->lastOutput[i-2]+
+ sp->lastOutput[i+2]+
+ sp->lastOutput[i-step]+
+ sp->lastOutput[i+step]) >> 2)
+ + sp->lastOutput[i];
+ if (!j) {
+ sp->output[i] = 0;
+ } else {
+ j = j - sp->lastLastOutput[i] +
+ (sp->lastLastOutput[i] - ((sp->lastOutput[i])>>2)) - 1;
+ if (j < 0) sp->output[i] = 0;
+ else if (j & (255*256)) sp->output[i] = 255;
+ else sp->output[i] = j;
+ }
+ } while(++i < end);
+ }
+}
+
+/* Apply the configured inter-frame fade effect to the output bitmap.
+ * Any other mode value is a no-op (note that FADE_HEAT is not mapped
+ * here — only FADE_FLAME selects fadeHeat). */
+void fade(struct syna_priv *sp) {
+  if (sp->fadeMode == FADE_STARS)
+    fadeFade(sp);
+  else if (sp->fadeMode == FADE_FLAME)
+    fadeHeat(sp);
+  else if (sp->fadeMode == FADE_WAVE)
+    fadeWave(sp);
+}
+
+/* Render one audio fragment into the output bitmap: FFT the stereo
+ * samples, derive per-bin magnitude for each channel plus a "clarity"
+ * (surround) measure, then plot each bin as a star or diamond whose
+ * horizontal position encodes stereo balance and vertical position
+ * encodes frequency.
+ * NOTE(review): 'len' is never used — the loop below always reads
+ * SYNA_SIZE*2 bytes from 'data'; callers must supply at least that
+ * much.  Always returns 0. */
+int coreGo(struct syna_priv *sp,guchar *data,gint len) {
+ double x[SYNA_SIZE], y[SYNA_SIZE];
+ double a[SYNA_SIZE], b[SYNA_SIZE];
+ int clarity[SYNA_SIZE]; //Surround sound
+ int i,j,k;
+ /* map SYNA_SIZE/2 frequency bins onto the available bitmap height */
+ int heightFactor = SYNA_SIZE / 2 / sp->outHeight + 1;
+ int actualHeight = SYNA_SIZE / 2 / heightFactor;
+ int heightAdd = sp->outHeight + (actualHeight >> 1);
+
+ /* smaller stars need brighter centres to stay visible */
+ int brightFactor = (int)(150 * sp->brightnessTwiddler / (sp->starSize+0.01));
+ double brightFactor2;
+
+ /* de-interleave the stereo bytes: left -> real, right -> imaginary */
+ for(i=0;i<SYNA_SIZE;i++) {
+ x[i] = data[i*2];
+ y[i] = data[i*2+1];
+ }
+
+ fft(sp,x,y);
+
+ /* recover per-channel magnitudes from the single complex FFT by
+ * combining bin i with its mirror bin SYNA_SIZE-i (bit-reversed) */
+ for(i=0 +1;i<SYNA_SIZE;i++) {
+ double x1 = x[sp->bitReverse[i]],
+ y1 = y[sp->bitReverse[i]],
+ x2 = x[sp->bitReverse[SYNA_SIZE-i]],
+ y2 = y[sp->bitReverse[SYNA_SIZE-i]],
+ aa,bb;
+ a[i] = sqrt(aa= (x1+x2)*(x1+x2) + (y1-y2)*(y1-y2) );
+ b[i] = sqrt(bb= (x1-x2)*(x1-x2) + (y1+y2)*(y1+y2) );
+ /* clarity: correlation between channels, scaled to -256..256 */
+ if (aa+bb != 0.0)
+ clarity[i] = (int)(
+ ( (x1+x2) * (x1-x2) + (y1+y2) * (y1-y2) )/(aa+bb) * 256 );
+ else
+ clarity[i] = 0;
+ }
+
+ /* Correct for window size */
+ brightFactor2 = (brightFactor/65536.0/SYNA_SIZE)*
+ sqrt(actualHeight*sp->outWidth/(320.0*200.0));
+
+ for(i=1;i<SYNA_SIZE/2;i++) {
+ if (a[i] > 0 || b[i] > 0) {
+ /* x from stereo balance, y from frequency bin */
+ int h = (int)( b[i]*sp->outWidth / (a[i]+b[i]) );
+ int br1, br2, br = (int)(
+ (a[i]+b[i])*i*brightFactor2 );
+ int px = h,
+ py = heightAdd - i / heightFactor;
+ /* split brightness between the two channels by clarity */
+ br1 = br*(clarity[i]+128)>>8;
+ br2 = br*(128-clarity[i])>>8;
+ if (br1 < 0) br1 = 0; else if (br1 > 255) br1 = 255;
+ if (br2 < 0) br2 = 0; else if (br2 > 255) br2 = 255;
+
+ if (sp->pointsAreDiamonds) {
+ /* draw a diamond: concentric rotated squares of decaying
+ * brightness */
+ addPixel(sp,px,py,br1,br2);
+ br1=sp->scaleDown[br1];br2=sp->scaleDown[br2];
+
+ //TODO: Use addpixelfast
+ for(j=1;br1>0||br2>0;j++,br1=sp->scaleDown[br1],
+ br2=sp->scaleDown[br2]) {
+ for(k=0;k<j;k++) {
+ addPixel(sp,px-j+k,py-k,br1,br2);
+ addPixel(sp,px+k,py-j+k,br1,br2);
+ addPixel(sp,px+j-k,py+k,br1,br2);
+ addPixel(sp,px-k,py+j-k,br1,br2);
+ }
+ }
+ } else {
+ /* draw a star (plus shape); use the bounds-checked path only
+ * near the edges, the fast pointer path in the interior */
+ if (px < sp->maxStarRadius || py < sp->maxStarRadius ||
+ px > sp->outWidth-sp->maxStarRadius ||
+ py > sp->outHeight-sp->maxStarRadius) {
+ addPixel(sp,px,py,br1,br2);
+ for (j=1;(br1>0) || (br2>0);j++,br1=sp->scaleDown[br1],
+ br2=sp->scaleDown[br2]) {
+ addPixel(sp,px+j,py,br1,br2);
+ addPixel(sp,px,py+j,br1,br2);
+ addPixel(sp,px-j,py,br1,br2);
+ addPixel(sp,px,py-j,br1,br2);
+ }
+ } else {
+ unsigned char *p = sp->output+px*2+py*sp->outWidth*2;
+ unsigned char *p1=p, *p2=p, *p3=p, *p4=p;
+ addPixelFast(p,br1,br2);
+ for(;br1>0||br2>0;br1=sp->scaleDown[br1],br2=sp->scaleDown[br2]) {
+ p1 += 2;
+ addPixelFast(p1,br1,br2);
+ p2 -= 2;
+ addPixelFast(p2,br1,br2);
+ p3 += sp->outWidth*2;
+ addPixelFast(p3,br1,br2);
+ p4 -= sp->outWidth*2;
+ addPixelFast(p4,br1,br2);
+ }
+ }
+ }
+ }
+ }
+ return 0;
+}
+
+/* Fill a 256-entry RGBx palette (4 bytes per entry; byte 3 untouched).
+ * The low nibble of the index selects foreground intensity, the high
+ * nibble background intensity; fg/bg hues come from the slider fields
+ * set in coreInit(). */
+void setupPalette(struct syna_priv *sp,guchar *palette) {
+ #define BOUND(x) ((x) > 255 ? 255 : (x))
+ #define PEAKIFY(x) (int)(BOUND((x) - (x)*(255-(x))/255/2))
+#ifndef MAX
+ #define MAX(x,y) ((x) > (y) ? (x) : (y))
+#endif /* MAX */
+ int i,f,b;
+
+ /* normalize each color triple so its largest component is 1.0 */
+ double scale, fgRed, fgGreen, fgBlue, bgRed, bgGreen, bgBlue;
+ fgRed = sp->fgRedSlider;
+ fgGreen = sp->fgGreenSlider;
+ fgBlue = 1.0 - MAX(fgRed,fgGreen);
+ scale = MAX(MAX(fgRed,fgGreen),fgBlue);
+ fgRed /= scale;
+ fgGreen /= scale;
+ fgBlue /= scale;
+
+ bgRed = sp->bgRedSlider;
+ bgGreen = sp->bgGreenSlider;
+ bgBlue = 1.0 - MAX(sp->bgRedSlider,sp->bgGreenSlider);
+ scale = MAX(MAX(bgRed,bgGreen),bgBlue);
+ bgRed /= scale;
+ bgGreen /= scale;
+ bgBlue /= scale;
+
+ for(i=0;i<256;i++) {
+ f = i&15; /* foreground level: low nibble */
+ b = i/16; /* background level: high nibble */
+ palette[i*4+0] = PEAKIFY(b*bgRed*16+f*fgRed*16);
+ palette[i*4+1] = PEAKIFY(b*bgGreen*16+f*fgGreen*16);
+ palette[i*4+2] = PEAKIFY(b*bgBlue*16+f*fgBlue*16);
+ }
+}
--- /dev/null
+
+#ifndef HGUARD_SYNAESTHESIA_CORE_H
+#define HGUARD_SYNAESTHESIA_CORE_H
+
+#include <glib.h>
+
+#define SYNA_BITS 8
+#define SYNA_SIZE (1 << SYNA_BITS)
+
+/* All per-instance state for the synaesthesia effect. */
+struct syna_priv {
+  gdouble cosTable[SYNA_SIZE],negSinTable[SYNA_SIZE];  /* FFT twiddle tables */
+  gint bitReverse[SYNA_SIZE];  /* FFT output reordering, see bitReverser() */
+  gint scaleDown[256];         /* brightness decay lookup, see setStarSize() */
+  gint maxStarRadius;
+  gint outWidth,outHeight;     /* bitmap size in pixels (2 bytes/pixel, see addPixel()) */
+  gint fadeMode;               /* one of the FADE_* values */
+  /* FIX: brightnessTwiddler and starSize are assigned fractional values
+   * (0.33 and 0.125 in coreInit()) and used in floating-point math in
+   * coreGo(); declared gint they truncated to 0, which zeroed the
+   * brightness factor entirely. */
+  gdouble brightnessTwiddler;
+  gdouble starSize;
+  gint pointsAreDiamonds;      /* boolean: diamond vs star point shape */
+
+  /* palette hue sliders consumed by setupPalette() */
+  gdouble fgRedSlider, fgGreenSlider, bgRedSlider, bgGreenSlider;
+
+  guchar *output,*lastOutput,*lastLastOutput;  /* the three frame bitmaps */
+};
+
+#define FADE_WAVE 1
+#define FADE_HEAT 2
+#define FADE_STARS 3
+#define FADE_FLAME 4
+
+void setStarSize(struct syna_priv *sp, gdouble size);
+void coreInit(struct syna_priv *sp, int w, int h);
+int coreGo(struct syna_priv *sp, guchar *data, gint len);
+void fade(struct syna_priv *sp);
+void setupPalette(struct syna_priv *sp, guchar *palette);
+
+
+#endif /* HGUARD_SYNAESTHESIA_CORE_H */
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#include "config.h"
+
+#include "gstsynaesthesia.h"
+#include "core.h"
+
+static gboolean gst_synaesthesia_start(GstElement *element);
+
+/* Element metadata advertised to the GStreamer registry. */
+GstElementDetails gst_synaesthesia_details = {
+ "Synaesthesia display",
+ "Sink/Visualization",
+ "Cool color display based on stereo info",
+ VERSION,
+ "Erik Walthinsen <omega@cse.ogi.edu>",
+ "(C) 1999",
+};
+
+static GstElementClass *parent_class = NULL;
+//static guint gst_synaesthesia_signals[LAST_SIGNAL] = { 0 };
+
+/* Synaesthesia signals and args */
+enum {
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum {
+ ARG_0,
+ ARG_WIDTH,
+ ARG_HEIGHT,
+ ARG_WIDGET,
+};
+
+/* Build the pad template for the always-present audio sink pad:
+ * raw 16-bit audio (rate/channels deliberately left open — see the
+ * comment at the bottom). */
+static GstPadTemplate*
+sink_factory (void)
+{
+ return
+ gst_padtemplate_new (
+ "sink", /* the name of the pads */
+ GST_PAD_SINK, /* type of the pad */
+ GST_PAD_ALWAYS, /* ALWAYS/SOMETIMES */
+ gst_caps_new (
+ "synaesthesia_sink16", /* the name of the caps */
+ "audio/raw", /* the mime type of the caps */
+ gst_props_new (
+ /* Properties follow: */
+ "format", GST_PROPS_INT (16),
+ "depth", GST_PROPS_INT (16),
+ NULL)),
+ NULL);
+ // These properties commented out so that autoplugging works for now:
+ // the autoplugging needs to be fixed (caps negotiation needed)
+ //,"rate", GST_PROPS_INT (44100)
+ //,"channels", GST_PROPS_INT (2)
+}
+
+static void gst_synaesthesia_class_init(GstSynaesthesiaClass *klass);
+static void gst_synaesthesia_init(GstSynaesthesia *synaesthesia);
+
+static void gst_synaesthesia_chain(GstPad *pad,GstBuffer *buf);
+
+static void gst_synaesthesia_set_property(GObject *object, guint prop_id, const GValue *value, GParamSpec *pspec);
+static void gst_synaesthesia_get_property(GObject *object, guint prop_id, GValue *value, GParamSpec *pspec);
+
+
+static GstPadTemplate *sink_template;
+
+/* Return the GType for GstSynaesthesia, registering it on first use. */
+GType
+gst_synaesthesia_get_type(void) {
+  static GType type = 0;
+
+  if (type == 0) {
+    static const GTypeInfo info = {
+      sizeof(GstSynaesthesiaClass),
+      NULL,                                    /* base_init */
+      NULL,                                    /* base_finalize */
+      (GClassInitFunc)gst_synaesthesia_class_init,
+      NULL,                                    /* class_finalize */
+      NULL,                                    /* class_data */
+      sizeof(GstSynaesthesia),
+      0,                                       /* n_preallocs */
+      (GInstanceInitFunc)gst_synaesthesia_init,
+    };
+
+    type = g_type_register_static(GST_TYPE_ELEMENT, "GstSynaesthesia",
+                                  &info, 0);
+  }
+  return type;
+}
+
+/* Class init: install the width/height properties and hook up the
+ * GObject property accessors. */
+static void
+gst_synaesthesia_class_init(GstSynaesthesiaClass *klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass*)klass;
+  gstelement_class = (GstElementClass*)klass;
+
+  parent_class = g_type_class_ref(GST_TYPE_ELEMENT);
+
+  /* FIX: gst_synaesthesia_set_property() implements writers for both
+   * properties, but they were registered G_PARAM_READABLE only, so
+   * g_object_set() on them was rejected by GObject before ever reaching
+   * the setter. */
+  g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_WIDTH,
+    g_param_spec_int("width","width","width",
+                     G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
+  g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_HEIGHT,
+    g_param_spec_int("height","height","height",
+                     G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
+  /* NOTE(review): get_property() also handles ARG_WIDGET, but no
+   * "widget" property is installed here, so that branch is unreachable —
+   * confirm whether a g_param_spec_object() registration is missing. */
+
+  gobject_class->set_property = gst_synaesthesia_set_property;
+  gobject_class->get_property = gst_synaesthesia_get_property;
+}
+
+/* Instance init: create the always-present sink pad, attach the chain
+ * function, and immediately set up the display state and GTK widget. */
+static void
+gst_synaesthesia_init(GstSynaesthesia *synaesthesia)
+{
+ synaesthesia->sinkpad = gst_pad_new_from_template (sink_template, "sink");
+ gst_element_add_pad(GST_ELEMENT(synaesthesia), synaesthesia->sinkpad);
+ gst_pad_set_chain_function(synaesthesia->sinkpad, gst_synaesthesia_chain);
+
+ gst_synaesthesia_start(GST_ELEMENT(synaesthesia));
+}
+
+/* Chain function: run the synaesthesia effect over one audio buffer and
+ * paint the result into the element's GTK drawing area.
+ * NOTE(review): samplecount is computed from the buffer size, but
+ * coreGo() ignores its 'len' argument and always reads SYNA_SIZE stereo
+ * byte pairs — there is no check that the buffer is large enough.
+ * The fade() step is currently commented out. */
+static void gst_synaesthesia_chain(GstPad *pad,GstBuffer *buf) {
+ GstSynaesthesia *syna;
+ gint samplecount;
+
+ g_return_if_fail(pad != NULL);
+ g_return_if_fail(GST_IS_PAD(pad));
+ g_return_if_fail(buf != NULL);
+
+ syna = GST_SYNAESTHESIA(GST_OBJECT_PARENT (pad));
+ g_return_if_fail(syna != NULL);
+ g_return_if_fail(GST_IS_SYNAESTHESIA(syna));
+
+ /* number of interleaved stereo 16-bit frames in the buffer */
+ samplecount = GST_BUFFER_SIZE(buf) /
+ (2 * sizeof(gint16));
+
+// GST_DEBUG (0,"fading\n");
+// fade(&syna->sp);
+ GST_DEBUG (0,"doing effect\n");
+ coreGo(&syna->sp,GST_BUFFER_DATA(buf),samplecount);
+
+// GST_DEBUG (0,"drawing\n");
+/* GST_DEBUG (0,"gdk_draw_indexed_image(%p,%p,%d,%d,%d,%d,%s,%p,%d,%p);\n",
+ syna->image->window,
+ syna->image->style->fg_gc[GTK_STATE_NORMAL],
+ 0,0,syna->width,syna->height,
+ "GDK_RGB_DITHER_NORMAL",
+ syna->sp.output,syna->width,
+ &syna->cmap);*/
+/* gdk_draw_indexed_image(syna->image->window,
+ syna->image->style->fg_gc[GTK_STATE_NORMAL],
+ 0,0,syna->width,syna->height,
+ GDK_RGB_DITHER_NORMAL,
+ syna->sp.output,syna->width,
+ &syna->cmap);*/
+ /* grayscale blit of the effect bitmap into the drawing area */
+ gdk_draw_gray_image(syna->image->window,
+ syna->image->style->fg_gc[GTK_STATE_NORMAL],
+ 0,0,syna->width,syna->height,
+ GDK_RGB_DITHER_NORMAL,
+ syna->sp.output,syna->width);
+
+ gst_trace_add_entry(NULL,0,buf,"synaesthesia: calculated syna");
+
+ /* consumed: this is a sink, nothing is pushed downstream */
+ gst_buffer_unref(buf);
+}
+
+/* Standard GObject property setter for "width" and "height". */
+static void gst_synaesthesia_set_property(GObject *object, guint prop_id, const GValue *value, GParamSpec *pspec) {
+  GstSynaesthesia *synaesthesia;
+
+  /* it's not null if we got it, but it might not be ours */
+  g_return_if_fail(GST_IS_SYNAESTHESIA(object));
+  synaesthesia = GST_SYNAESTHESIA(object);
+
+  switch (prop_id) {
+    case ARG_WIDTH:
+      synaesthesia->width = g_value_get_int (value);
+      break;
+    case ARG_HEIGHT:
+      synaesthesia->height = g_value_get_int (value);
+      break;
+    default:
+      /* FIX: invalid property ids were silently swallowed here; warn
+       * like gst_synaesthesia_get_property() does so misuse is visible. */
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Standard GObject property getter for width, height and the display
+ * widget. */
+static void
+gst_synaesthesia_get_property(GObject *object, guint prop_id, GValue *value, GParamSpec *pspec)
+{
+  GstSynaesthesia *synaesthesia;
+
+  /* it's not null if we got it, but it might not be ours */
+  g_return_if_fail(GST_IS_SYNAESTHESIA(object));
+  synaesthesia = GST_SYNAESTHESIA(object);
+
+  /* FIX: this debug line referenced 'id', which is not declared in this
+   * function (the parameter is 'prop_id'), so the file did not compile. */
+  GST_DEBUG (0,"have synaesthesia get_property(%d), wanting %d\n",prop_id,ARG_WIDGET);
+
+  switch (prop_id) {
+    case ARG_WIDTH: {
+      g_value_set_int (value, synaesthesia->width);
+      GST_DEBUG (0,"returning width value %d\n",g_value_get_int (value));
+      break;
+    }
+    case ARG_HEIGHT: {
+      g_value_set_int (value, synaesthesia->height);
+      GST_DEBUG (0,"returning height value %d\n",g_value_get_int (value));
+      break;
+    }
+    case ARG_WIDGET: {
+      g_value_set_object (value, G_OBJECT(synaesthesia->image));
+      GST_DEBUG (0,"returning widget value %p\n",g_value_get_object (value));
+      break;
+    }
+    default: {
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      GST_DEBUG (0,"returning invalid type\n");
+      break;
+    }
+  }
+}
+
+
+
+/* Plugin entry point: register the "synaesthesia" element factory and
+ * its sink pad template with the plugin system. */
+static gboolean
+plugin_init (GModule *module, GstPlugin *plugin)
+{
+ GstElementFactory *factory;
+
+ factory = gst_elementfactory_new("synaesthesia", GST_TYPE_SYNAESTHESIA,
+ &gst_synaesthesia_details);
+ g_return_val_if_fail(factory != NULL, FALSE);
+
+ /* keep the template in the file-scope variable so instance init can
+ * create pads from it later */
+ sink_template = sink_factory ();
+ gst_elementfactory_add_padtemplate(factory, sink_template);
+
+ gst_plugin_add_feature (plugin, GST_PLUGIN_FEATURE (factory));
+
+ return TRUE;
+}
+
+/* Descriptor picked up by the GStreamer core at load time: core version
+ * this plugin was built against, plugin name, and init hook. */
+GstPluginDesc plugin_desc = {
+ GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "synaesthesia",
+ plugin_init
+};
+
+/* One-time setup called from instance init: pick a fixed 255x255
+ * display size, build the effect tables and palette, and create the
+ * GTK drawing area that get_property("widget") later hands out. */
+static gboolean
+gst_synaesthesia_start(GstElement *element)
+{
+ GstSynaesthesia *syna;
+
+ g_return_val_if_fail(GST_IS_SYNAESTHESIA(element), FALSE);
+ syna = GST_SYNAESTHESIA(element);
+
+ syna->width = 255;
+ syna->height = 255;
+ syna->starsize = 2;
+
+ coreInit(&syna->sp, syna->width, syna->height);
+ setStarSize(&syna->sp, syna->starsize);
+
+ setupPalette(&syna->sp, syna->cmap.colors);
+
+ /* prepare gdk-rgb rendering used by the chain function's blit */
+ gdk_rgb_init();
+ syna->image = gtk_drawing_area_new();
+ GST_DEBUG (0,"image is %p\n",syna->image);
+ gtk_drawing_area_size(GTK_DRAWING_AREA(syna->image),
+ syna->width,
+ syna->height);
+ gtk_widget_show(syna->image);
+
+ GST_DEBUG (0,"started synaesthesia\n");
+ return TRUE;
+}
+
--- /dev/null
+/* Gnome-Streamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __GST_SYNAESTHESIA_H__
+#define __GST_SYNAESTHESIA_H__
+
+
+#include <gst/gst.h>
+
+#include <core.h>
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/* Standard GObject cast/check convenience macros for GstSynaesthesia. */
+#define GST_TYPE_SYNAESTHESIA \
+  (gst_synaesthesia_get_type())
+#define GST_SYNAESTHESIA(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SYNAESTHESIA,GstSynaesthesia))
+/* FIX: the class cast must yield a GstSynaesthesiaClass*, not an
+ * instance pointer (was GstSynaesthesia). */
+#define GST_SYNAESTHESIA_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SYNAESTHESIA,GstSynaesthesiaClass))
+#define GST_IS_SYNAESTHESIA(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SYNAESTHESIA))
+/* FIX: the parameter was named 'obj' while the expansion used 'klass',
+ * so any use of this macro failed to compile. */
+#define GST_IS_SYNAESTHESIA_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SYNAESTHESIA))
+
+typedef struct _GstSynaesthesia GstSynaesthesia;
+typedef struct _GstSynaesthesiaClass GstSynaesthesiaClass;
+
+/* Instance structure for the synaesthesia visualization element. */
+struct _GstSynaesthesia {
+  GstElement element;        /* parent instance; must come first */
+
+  GstPad *sinkpad;           /* audio input pad */
+
+  gint width,height;         /* output geometry in pixels */
+  gdouble starsize;          /* passed to the core's setStarSize() */
+
+  struct syna_priv sp;       /* rendering-core state (see core.h) */
+  /* NOTE(review): the .c file also accesses members 'image' (a
+   * GtkWidget*) and 'cmap', which do not appear in this struct --
+   * confirm they are declared somewhere, or add them here. */
+};
+
+/* Class structure: no virtual methods of its own. */
+struct _GstSynaesthesiaClass {
+  GstElementClass parent_class;
+};
+
+GType gst_synaesthesia_get_type(void);
+
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+
+#endif /* __GST_SYNAESTHESIA_H__ */