AG_GST_DEFAULT_ELEMENTS
dnl *** plug-ins to include ***
- musepack musicbrainz nas neon ofa openal rsvg schro sdl smooth sndfile soundtouch spandsp timidity \
+dnl Non ported plugins (non-dependant, then dependant)
+dnl Make sure you have a space before and after all plugins
+GST_PLUGINS_NONPORTED=" adpcmdec adpcmenc aiff asfmux \
+ autoconvert camerabin cdxaparse coloreffects \
+ dccp debugutils dtmf faceoverlay festival \
+ fieldanalysis freeverb freeze frei0r gaudieffects geometrictransform h264parse \
+ hdvparse hls id3tag inter interlace ivfparse jpegformat jp2kdecimator \
+ kate liveadder legacyresample librfb mpegdemux mpegtsmux \
+ mpegpsmux mpegvideoparse mve mxf mythtv nsf nuvdemux \
+ patchdetect pcapparse pnm rawparse real removesilence rtpmux rtpvp8 scaletempo \
+ sdi segmentclip siren speed subenc stereo tta videofilters \
+ videomaxrate videomeasure videosignal vmnc \
+ decklink fbdev linsys shm vcd \
+ voaacenc apexsink bz2 cdaudio celt cog curl dc1394 dirac directfb dts resindvd \
+ gsettings gsm jp2k ladspa modplug mpeg2enc mplex mimic \
++ musepack musicbrainz nas neon ofa openal opencv rsvg schro sdl smooth sndfile soundtouch spandsp timidity \
+ wildmidi xvid apple_media "
+AC_SUBST(GST_PLUGINS_NONPORTED)
dnl these are all the gst plug-ins, compilable without additional libs
AG_GST_CHECK_PLUGIN(adpcmdec)
# CFLAGS and LDFLAGS for compiling scan program. Only needed if your app/lib
# contains GtkObjects/GObjects and you want to document signals and properties.
- GTKDOC_CFLAGS = -DGST_USE_UNSTABLE_API $(GST_PLUGINS_BAD_CFLAGS) $(GST_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS)
-GTKDOC_CFLAGS = $(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS)
++GTKDOC_CFLAGS = -DGST_USE_UNSTABLE_API $(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS)
GTKDOC_LIBS = \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/basecamerabinsrc/libgstbasecamerabinsrc-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/interfaces/libgstphotography-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/signalprocessor/libgstsignalprocessor-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/video/libgstbasevideo-@GST_MAJORMINOR@.la \
- $(GST_BASE_LIBS) $(GST_BAD_LIBS)
- $(GST_BASE_LIBS)
++ $(GST_BASE_LIBS)
GTKDOC_CC=$(LIBTOOL) --tag=CC --mode=compile $(CC)
GTKDOC_LD=$(LIBTOOL) --tag=CC --mode=link $(CC)
}
if (G_UNLIKELY ((ret_size = faacEncEncode (faac->handle, (gint32 *) data,
- size / faac->bps, GST_BUFFER_DATA (out_buf),
- GST_BUFFER_SIZE (out_buf))) < 0))
+ size / faac->bps, out_data, out_size)) < 0))
goto encode_failed;
- GST_LOG_OBJECT (faac, "encoder return: %d", ret_size);
- if (G_LIKELY (ret_size > 0)) {
- GST_BUFFER_SIZE (out_buf) = ret_size;
+ gst_buffer_unmap (in_buf, data, size);
+
+ GST_LOG_OBJECT (faac, "encoder return: %" G_GSIZE_FORMAT, ret_size);
+
+ if (ret_size > 0) {
+ gst_buffer_unmap (out_buf, out_data, ret_size);
ret = gst_audio_encoder_finish_frame (enc, out_buf, faac->samples);
} else {
+ gst_buffer_unmap (out_buf, out_data, 0);
gst_buffer_unref (out_buf);
+ /* re-create encoder after final flush */
+ if (!in_buf) {
+ GST_DEBUG_OBJECT (faac, "flushed; recreating encoder");
+ gst_faac_close_encoder (faac);
+ if (!gst_faac_open_encoder (faac))
+ ret = GST_FLOW_ERROR;
+ }
}
return ret;
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("audio/x-raw-int, "
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { " GST_AUDIO_NE (S16) " }, "
- "rate = (int) { 8000, 12000, 16000, 24000, 48000 }, "
+ "rate = (int) { 48000, 24000, 16000, 12000, 8000 }, "
- "channels = (int) [ 1, 8 ], "
- "endianness = (int) BYTE_ORDER, "
- "signed = (boolean) true, " "width = (int) 16, " "depth = (int) 16")
+ "channels = (int) [ 1, 8 ] ")
);
static GstStaticPadTemplate opus_dec_sink_factory =
static GstFlowReturn
gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
{
- const guint8 *data = GST_BUFFER_DATA (buf);
+ const guint8 *data;
GstCaps *caps;
- GstStructure *s;
const GstAudioChannelPosition *pos = NULL;
g_return_val_if_fail (gst_opus_header_is_id_header (buf), GST_FLOW_ERROR);
GST_INFO_OBJECT (dec,
"Skipping %u samples (%u at 48000 Hz, %u left to skip)", skip,
scaled_skip, dec->pre_skip);
+ }
- if (gst_buffer_get_size (outbuf) == 0) {
- gst_buffer_unref (outbuf);
- outbuf = NULL;
- }
- if (GST_BUFFER_SIZE (outbuf) == 0) {
++ if (gst_buffer_get_size (outbuf) == 0) {
+ gst_buffer_unref (outbuf);
+ outbuf = NULL;
}
/* Apply gain */
static gboolean gst_opus_enc_sink_event (GstAudioEncoder * benc,
GstEvent * event);
-static GstCaps *gst_opus_enc_sink_getcaps (GstAudioEncoder * benc);
++static GstCaps *gst_opus_enc_sink_getcaps (GstAudioEncoder * benc,
++ GstCaps * filter);
static gboolean gst_opus_enc_setup (GstOpusEnc * enc);
static void gst_opus_enc_get_property (GObject * object, guint prop_id,
return FALSE;
}
-gst_opus_enc_sink_getcaps (GstAudioEncoder * benc)
+ static GstCaps *
- peercaps = gst_pad_peer_get_caps (GST_AUDIO_ENCODER_SRC_PAD (benc));
++gst_opus_enc_sink_getcaps (GstAudioEncoder * benc, GstCaps * filter)
+ {
+ GstOpusEnc *enc;
+ GstCaps *caps;
+ GstCaps *peercaps = NULL;
+ GstCaps *intersect = NULL;
+ guint i;
+ gboolean allow_multistream;
+
+ enc = GST_OPUS_ENC (benc);
+
+ GST_DEBUG_OBJECT (enc, "sink getcaps");
+
++ peercaps = gst_pad_peer_query_caps (GST_AUDIO_ENCODER_SRC_PAD (benc), filter);
+ if (!peercaps) {
+ GST_DEBUG_OBJECT (benc, "No peercaps, returning template sink caps");
+ return
+ gst_caps_copy (gst_pad_get_pad_template_caps
+ (GST_AUDIO_ENCODER_SINK_PAD (benc)));
+ }
+
+ intersect = gst_caps_intersect (peercaps,
+ gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (benc)));
+ gst_caps_unref (peercaps);
+
+ if (gst_caps_is_empty (intersect))
+ return intersect;
+
+ allow_multistream = FALSE;
+ for (i = 0; i < gst_caps_get_size (intersect); i++) {
+ GstStructure *s = gst_caps_get_structure (intersect, i);
+ gboolean multistream;
+ if (gst_structure_get_boolean (s, "multistream", &multistream)) {
+ if (multistream) {
+ allow_multistream = TRUE;
+ }
+ } else {
+ allow_multistream = TRUE;
+ }
+ }
+
+ gst_caps_unref (intersect);
+
+ caps =
+ gst_caps_copy (gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SINK_PAD
+ (benc)));
+ if (!allow_multistream) {
+ GValue range = { 0 };
+ g_value_init (&range, GST_TYPE_INT_RANGE);
+ gst_value_set_int_range (&range, 1, 2);
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+ gst_structure_set_value (s, "channels", &range);
+ }
+ g_value_unset (&range);
+ }
+
++ if (filter) {
++ GstCaps *tmp = gst_caps_intersect_full (caps, filter,
++ GST_CAPS_INTERSECT_FIRST);
++ gst_caps_unref (caps);
++ caps = tmp;
++ }
++
+ GST_DEBUG_OBJECT (enc, "Returning caps: %" GST_PTR_FORMAT, caps);
+ return caps;
+ }
+
static GstFlowReturn
gst_opus_enc_encode (GstOpusEnc * enc, GstBuffer * buf)
{
gst_opus_header_create_caps_from_headers (GstCaps ** caps, GSList ** headers,
GstBuffer * buf1, GstBuffer * buf2)
{
+ int n_streams, family;
+ gboolean multistream;
++ guint8 *data;
++ gsize size;
+
g_return_if_fail (caps);
g_return_if_fail (headers && !*headers);
- g_return_if_fail (GST_BUFFER_SIZE (buf1) >= 19);
++ g_return_if_fail (gst_buffer_get_size (buf1) >= 19);
++
++ data = gst_buffer_map (buf1, &size, NULL, GST_MAP_READ);
+
+ /* work out the number of streams */
- family = GST_BUFFER_DATA (buf1)[18];
++ family = data[18];
+ if (family == 0) {
+ n_streams = 1;
+ } else {
+ /* only included in the header for family > 0 */
- g_return_if_fail (GST_BUFFER_SIZE (buf1) >= 20);
- n_streams = GST_BUFFER_DATA (buf1)[19];
++ if (size >= 20)
++ n_streams = data[19];
++ else {
++ g_warning ("family > 0 but header buffer size < 20");
++ gst_buffer_unmap (buf1, data, size);
++ return;
++ }
+ }
+
++ gst_buffer_unmap (buf1, data, size);
+
/* mark and put on caps */
- *caps = gst_caps_from_string ("audio/x-opus");
+ multistream = n_streams > 1;
+ *caps = gst_caps_new_simple ("audio/x-opus",
+ "multistream", G_TYPE_BOOLEAN, multistream, NULL);
*caps = _gst_caps_set_buffer_array (*caps, "streamheader", buf1, buf2, NULL);
*headers = g_slist_prepend (*headers, buf2);
--- /dev/null
-
-
+ /*
+ * Opus Depayloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <stdlib.h>
+ #include <gst/rtp/gstrtpbuffer.h>
+ #include "gstrtpopusdepay.h"
+
+ GST_DEBUG_CATEGORY_STATIC (rtpopusdepay_debug);
+ #define GST_CAT_DEFAULT (rtpopusdepay_debug)
+
-static GstBuffer *gst_rtp_opus_depay_process (GstBaseRTPDepayload * depayload,
+ static GstStaticPadTemplate gst_rtp_opus_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ","
+ "clock-rate = (int) 48000, "
+ "encoding-name = (string) \"X-GST-OPUS-DRAFT-SPITTKA-00\"")
+ );
+
+ static GstStaticPadTemplate gst_rtp_opus_depay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-opus")
+ );
+
-static gboolean gst_rtp_opus_depay_setcaps (GstBaseRTPDepayload * depayload,
++static GstBuffer *gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload,
+ GstBuffer * buf);
-GST_BOILERPLATE (GstRTPOpusDepay, gst_rtp_opus_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
++static gboolean gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
-gst_rtp_opus_depay_base_init (gpointer klass)
++G_DEFINE_TYPE (GstRTPOpusDepay, gst_rtp_opus_depay,
++ GST_TYPE_RTP_BASE_DEPAYLOAD);
+
+ static void
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
++gst_rtp_opus_depay_class_init (GstRTPOpusDepayClass * klass)
+ {
-}
-
-static void
-gst_rtp_opus_depay_class_init (GstRTPOpusDepayClass * klass)
-{
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
-
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
++ GstRTPBaseDepayloadClass *gstbasertpdepayload_class;
++ GstElementClass *element_class;
++
++ element_class = GST_ELEMENT_CLASS (klass);
++ gstbasertpdepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_depay_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_depay_sink_template));
+ gst_element_class_set_details_simple (element_class,
+ "RTP Opus packet depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Opus audio from RTP packets",
+ "Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>");
-gst_rtp_opus_depay_init (GstRTPOpusDepay * rtpopusdepay,
- GstRTPOpusDepayClass * klass)
+
+ gstbasertpdepayload_class->process = gst_rtp_opus_depay_process;
+ gstbasertpdepayload_class->set_caps = gst_rtp_opus_depay_setcaps;
+
+ GST_DEBUG_CATEGORY_INIT (rtpopusdepay_debug, "rtpopusdepay", 0,
+ "Opus RTP Depayloader");
+ }
+
+ static void
-gst_rtp_opus_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
++gst_rtp_opus_depay_init (GstRTPOpusDepay * rtpopusdepay)
+ {
+
+ }
+
+ static gboolean
- srccaps = gst_caps_new_simple ("audio/x-opus", NULL);
- ret = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
++gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+ {
+ GstCaps *srccaps;
+ gboolean ret;
+
-gst_rtp_opus_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
++ srccaps = gst_caps_new_empty_simple ("audio/x-opus");
++ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+
+ GST_DEBUG_OBJECT (depayload,
+ "set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret);
+ gst_caps_unref (srccaps);
+
+ depayload->clock_rate = 48000;
+
+ return ret;
+ }
+
+ static GstBuffer *
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
++gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
+ {
+ GstBuffer *outbuf;
++ GstRTPBuffer rtpbuf = { NULL, };
++
++ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtpbuf);
++ outbuf = gst_rtp_buffer_get_payload_buffer (&rtpbuf);
++ gst_rtp_buffer_unmap (&rtpbuf);
+
+ return outbuf;
+ }
--- /dev/null
-#include <gst/rtp/gstbasertpdepayload.h>
+ /*
+ * Opus Depayloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.eu@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+ #ifndef __GST_RTP_OPUS_DEPAY_H__
+ #define __GST_RTP_OPUS_DEPAY_H__
+
+ #include <gst/gst.h>
- GstBaseRTPDepayload depayload;
++#include <gst/rtp/gstrtpbasedepayload.h>
+
+ G_BEGIN_DECLS typedef struct _GstRTPOpusDepay GstRTPOpusDepay;
+ typedef struct _GstRTPOpusDepayClass GstRTPOpusDepayClass;
+
+ #define GST_TYPE_RTP_OPUS_DEPAY \
+ (gst_rtp_opus_depay_get_type())
+ #define GST_RTP_OPUS_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_OPUS_DEPAY,GstRTPOpusDepay))
+ #define GST_RTP_OPUS_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_OPUS_DEPAY,GstRTPOpusDepayClass))
+ #define GST_IS_RTP_OPUS_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_OPUS_DEPAY))
+ #define GST_IS_RTP_OPUS_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_OPUS_DEPAY))
+
+
+ struct _GstRTPOpusDepay
+ {
- GstBaseRTPDepayloadClass parent_class;
++ GstRTPBaseDepayload depayload;
+
+ };
+
+ struct _GstRTPOpusDepayClass
+ {
++ GstRTPBaseDepayloadClass parent_class;
+ };
+
+ GType gst_rtp_opus_depay_get_type (void);
+
+ G_END_DECLS
+ #endif /* __GST_RTP_OPUS_DEPAY_H__ */
--- /dev/null
-static gboolean gst_rtp_opus_pay_setcaps (GstBaseRTPPayload * payload,
+ /*
+ * Opus Payloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include <string.h>
+
+ #include <gst/rtp/gstrtpbuffer.h>
+
+ #include "gstrtpopuspay.h"
+
+ GST_DEBUG_CATEGORY_STATIC (rtpopuspay_debug);
+ #define GST_CAT_DEFAULT (rtpopuspay_debug)
+
+
+ static GstStaticPadTemplate gst_rtp_opus_pay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-opus, multistream = (boolean) FALSE")
+ );
+
+ static GstStaticPadTemplate gst_rtp_opus_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 48000, "
+ "encoding-name = (string) \"X-GST-OPUS-DRAFT-SPITTKA-00\"")
+ );
+
-static GstFlowReturn gst_rtp_opus_pay_handle_buffer (GstBaseRTPPayload *
++static gboolean gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
-GST_BOILERPLATE (GstRtpOPUSPay, gst_rtp_opus_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
++static GstFlowReturn gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+
-gst_rtp_opus_pay_base_init (gpointer klass)
++G_DEFINE_TYPE (GstRtpOPUSPay, gst_rtp_opus_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+
+ static void
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
++gst_rtp_opus_pay_class_init (GstRtpOPUSPayClass * klass)
+ {
-}
-
-static void
-gst_rtp_opus_pay_class_init (GstRtpOPUSPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
-
- gstbasertppayload_class->set_caps = gst_rtp_opus_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_opus_pay_handle_buffer;
++ GstRTPBasePayloadClass *gstbasertppayload_class;
++ GstElementClass *element_class;
++
++ gstbasertppayload_class = (GstRTPBasePayloadClass *) klass;
++ element_class = GST_ELEMENT_CLASS (klass);
++
++ gstbasertppayload_class->set_caps = gst_rtp_opus_pay_setcaps;
++ gstbasertppayload_class->handle_buffer = gst_rtp_opus_pay_handle_buffer;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_pay_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_opus_pay_sink_template));
+
+ gst_element_class_set_details_simple (element_class,
+ "RTP Opus payloader",
+ "Codec/Payloader/Network/RTP",
+ "Puts Opus audio in RTP packets",
+ "Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>");
-gst_rtp_opus_pay_init (GstRtpOPUSPay * rtpopuspay, GstRtpOPUSPayClass * klass)
+
+ GST_DEBUG_CATEGORY_INIT (rtpopuspay_debug, "rtpopuspay", 0,
+ "Opus RTP Payloader");
+ }
+
+ static void
-gst_rtp_opus_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
++gst_rtp_opus_pay_init (GstRtpOPUSPay * rtpopuspay)
+ {
+ }
+
+ static gboolean
- gst_basertppayload_set_options (payload, "audio", FALSE,
++gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+ {
+ gboolean res;
+ gchar *capsstr;
+
+ capsstr = gst_caps_to_string (caps);
+
- gst_basertppayload_set_outcaps (payload, "caps", G_TYPE_STRING, capsstr,
++ gst_rtp_base_payload_set_options (payload, "audio", FALSE,
+ "X-GST-OPUS-DRAFT-SPITTKA-00", 48000);
+ res =
-gst_rtp_opus_pay_handle_buffer (GstBaseRTPPayload * basepayload,
++ gst_rtp_base_payload_set_outcaps (payload, "caps", G_TYPE_STRING, capsstr,
+ NULL);
+ g_free (capsstr);
+
+ return res;
+ }
+
+ static GstFlowReturn
- GstClockTime timestamp;
-
- guint size;
- guint8 *data;
- guint8 *payload;
-
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
- timestamp = GST_BUFFER_TIMESTAMP (buffer);
-
- outbuf = gst_rtp_buffer_new_allocate (size, 0, 0);
- payload = gst_rtp_buffer_get_payload (outbuf);
-
- memcpy (payload, data, size);
-
- gst_rtp_buffer_set_marker (outbuf, FALSE);
- GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
-
- return gst_basertppayload_push (basepayload, outbuf);
++gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+ {
++ GstRTPBuffer rtpbuf = { NULL, };
+ GstBuffer *outbuf;
++ gsize size;
++ gpointer *data;
++
++ /* Copy data and timestamp to a new output buffer
++ * FIXME : Don't we have a convenience function for this ? */
++ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
++ outbuf = gst_rtp_buffer_new_copy_data (data, size);
++ GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
++
++ /* Unmap and free input buffer */
++ gst_buffer_unmap (buffer, data, size);
++ gst_buffer_unref (buffer);
++
++ /* Remove marker from RTP buffer */
++ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtpbuf);
++ gst_rtp_buffer_set_marker (&rtpbuf, FALSE);
++ gst_rtp_buffer_unmap (&rtpbuf);
++
++ /* Push out */
++ return gst_rtp_base_payload_push (basepayload, outbuf);
+ }
--- /dev/null
-#include <gst/rtp/gstbasertppayload.h>
+ /*
+ * Opus Payloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.eu@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+ #ifndef __GST_RTP_OPUS_PAY_H__
+ #define __GST_RTP_OPUS_PAY_H__
+
+ #include <gst/gst.h>
- GstBaseRTPPayload payload;
++#include <gst/rtp/gstrtpbasepayload.h>
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_RTP_OPUS_PAY \
+ (gst_rtp_opus_pay_get_type())
+ #define GST_RTP_OPUS_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_OPUS_PAY,GstRtpOPUSPay))
+ #define GST_RTP_OPUS_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_OPUS_PAY,GstRtpOPUSPayClass))
+ #define GST_IS_RTP_OPUS_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_OPUS_PAY))
+ #define GST_IS_RTP_OPUS_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_OPUS_PAY))
+
+ typedef struct _GstRtpOPUSPay GstRtpOPUSPay;
+ typedef struct _GstRtpOPUSPayClass GstRtpOPUSPayClass;
+
+ struct _GstRtpOPUSPay
+ {
- GstBaseRTPPayloadClass parent_class;
++ GstRTPBasePayload payload;
+ };
+
+ struct _GstRtpOPUSPayClass
+ {
++ GstRTPBasePayloadClass parent_class;
+ };
+
+ GType gst_rtp_opus_pay_get_type (void);
+
+ G_END_DECLS
+
+ #endif /* __GST_RTP_OPUS_PAY_H__ */
static GstStateChangeReturn gst_base_video_codec_change_state (GstElement *
element, GstStateChange transition);
-GType
-gst_video_frame_get_type (void)
-{
- static volatile gsize type = 0;
+static GstElementClass *parent_class = NULL;
- if (g_once_init_enter (&type)) {
- GType _type;
+G_DEFINE_BOXED_TYPE (GstVideoFrameState, gst_video_frame_state,
+ (GBoxedCopyFunc) gst_video_frame_state_ref,
- (GBoxedFreeFunc) gst_video_frame_state_unref)
++ (GBoxedFreeFunc) gst_video_frame_state_unref);
- _type = g_boxed_type_register_static ("GstVideoFrame",
- (GBoxedCopyFunc) gst_video_frame_ref,
- (GBoxedFreeFunc) gst_video_frame_unref);
- g_once_init_leave (&type, _type);
- }
- return (GType) type;
-}
-
-GST_BOILERPLATE (GstBaseVideoCodec, gst_base_video_codec, GstElement,
- GST_TYPE_ELEMENT);
+/* NOTE (Edward): Do not use G_DEFINE_* because we need to have
+ * a GClassInitFunc called with the target class (which the macros
- * don't handle). */
- static void gst_base_video_codec_class_init (GstBaseVideoCodecClass *
- klass);
- static void gst_base_video_codec_init (GstBaseVideoCodec * dec,
++ * don't handle).
++ */
++static void gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass);
++static void gst_base_video_codec_init (GstBaseVideoCodec * dec,
+ GstBaseVideoCodecClass * klass);
-static void
-gst_base_video_codec_base_init (gpointer g_class)
+GType
+gst_base_video_codec_get_type (void)
{
- GST_DEBUG_CATEGORY_INIT (basevideocodec_debug, "basevideocodec", 0,
- "Base Video Codec");
+ static volatile gsize base_video_codec_type = 0;
+ if (g_once_init_enter (&base_video_codec_type)) {
+ GType _type;
+ static const GTypeInfo base_video_codec_info = {
+ sizeof (GstBaseVideoCodecClass),
+ NULL,
+ NULL,
+ (GClassInitFunc) gst_base_video_codec_class_init,
+ NULL,
+ NULL,
+ sizeof (GstBaseVideoCodec),
+ 0,
+ (GInstanceInitFunc) gst_base_video_codec_init,
+ };
+
+ _type = g_type_register_static (GST_TYPE_ELEMENT,
+ "GstBaseVideoCodec", &base_video_codec_info, G_TYPE_FLAG_ABSTRACT);
+ g_once_init_leave (&base_video_codec_type, _type);
+ }
+ return base_video_codec_type;
}
static void
GST_DEBUG_CATEGORY (basevideoencoder_debug);
#define GST_CAT_DEFAULT basevideoencoder_debug
+ typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
+ struct _ForcedKeyUnitEvent
+ {
+ GstClockTime running_time;
+ gboolean pending; /* TRUE if this was requested already */
+ gboolean all_headers;
+ guint count;
+ };
+
+ static void
+ forced_key_unit_event_free (ForcedKeyUnitEvent * evt)
+ {
+ g_slice_free (ForcedKeyUnitEvent, evt);
+ }
+
+ static ForcedKeyUnitEvent *
+ forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
+ guint count)
+ {
+ ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent);
+
+ evt->running_time = running_time;
+ evt->all_headers = all_headers;
+ evt->count = count;
+
+ return evt;
+ }
+
static void gst_base_video_encoder_finalize (GObject * object);
-static gboolean gst_base_video_encoder_sink_setcaps (GstPad * pad,
- GstCaps * caps);
-static GstCaps *gst_base_video_encoder_sink_getcaps (GstPad * pad);
+static GstCaps *gst_base_video_encoder_sink_getcaps (GstPad * pad,
+ GstCaps * filter);
static gboolean gst_base_video_encoder_src_event (GstPad * pad,
- GstEvent * event);
+ GstObject * parent, GstEvent * event);
static gboolean gst_base_video_encoder_sink_event (GstPad * pad,
- GstEvent * event);
+ GstObject * parent, GstEvent * event);
+static gboolean gst_base_video_encoder_sink_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
static GstFlowReturn gst_base_video_encoder_chain (GstPad * pad,
- GstBuffer * buf);
+ GstObject * parent, GstBuffer * buf);
static GstStateChangeReturn gst_base_video_encoder_change_state (GstElement *
element, GstStateChange transition);
-static const GstQueryType *gst_base_video_encoder_get_query_types (GstPad *
- pad);
static gboolean gst_base_video_encoder_src_query (GstPad * pad,
- GstQuery * query);
+ GstObject * parent, GstQuery * query);
-static void
-_do_init (GType object_type)
-{
- const GInterfaceInfo preset_interface_info = {
- NULL, /* interface_init */
- NULL, /* interface_finalize */
- NULL /* interface_data */
- };
-
- g_type_add_interface_static (object_type, GST_TYPE_PRESET,
- &preset_interface_info);
-}
-
-GST_BOILERPLATE_FULL (GstBaseVideoEncoder, gst_base_video_encoder,
- GstBaseVideoCodec, GST_TYPE_BASE_VIDEO_CODEC, _do_init);
-
-static void
-gst_base_video_encoder_base_init (gpointer g_class)
-{
- GST_DEBUG_CATEGORY_INIT (basevideoencoder_debug, "basevideoencoder", 0,
- "Base Video Encoder");
-
-}
+#define gst_base_video_encoder_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstBaseVideoEncoder, gst_base_video_encoder,
- GST_TYPE_BASE_VIDEO_CODEC, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL);
- );
++ GST_TYPE_BASE_VIDEO_CODEC, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL););
static void
gst_base_video_encoder_class_init (GstBaseVideoEncoderClass * klass)
static void
gst_base_video_encoder_finalize (GObject * object)
{
- GstBaseVideoEncoder *base_video_encoder;
-
++ GstBaseVideoEncoder *base_video_encoder = (GstBaseVideoEncoder *) object;
GST_DEBUG_OBJECT (object, "finalize");
- base_video_encoder = GST_BASE_VIDEO_ENCODER (object);
+ gst_buffer_replace (&base_video_encoder->headers, NULL);
+
G_OBJECT_CLASS (parent_class)->finalize (object);
}
break;
}
- base_video_encoder->a.at_eos = FALSE;
+ base_video_encoder->at_eos = FALSE;
- gst_segment_set_newsegment_full (&GST_BASE_VIDEO_CODEC
- (base_video_encoder)->segment, update, rate, applied_rate, format,
- start, stop, position);
+ gst_segment_copy_into (segment, &GST_BASE_VIDEO_CODEC
+ (base_video_encoder)->segment);
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
break;
}
(base_video_encoder)->segment, GST_FORMAT_TIME,
frame->presentation_timestamp);
+ /* re-use upstream event if any so it also conveys any additional
+ * info upstream arranged in there */
GST_OBJECT_LOCK (base_video_encoder);
- if (base_video_encoder->force_keyunit_event) {
- ev = base_video_encoder->force_keyunit_event;
- base_video_encoder->force_keyunit_event = NULL;
- } else {
- ev = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
- gst_structure_new_empty ("GstForceKeyUnit"));
+ for (l = base_video_encoder->force_key_unit; l; l = l->next) {
+ ForcedKeyUnitEvent *tmp = l->data;
+
+ /* Skip non-pending keyunits */
+ if (!tmp->pending)
+ continue;
+
+ /* Simple case, keyunit ASAP */
+ if (tmp->running_time == GST_CLOCK_TIME_NONE) {
+ fevt = tmp;
+ break;
+ }
+
+ /* Event for before this frame */
+ if (tmp->running_time <= running_time) {
+ fevt = tmp;
+ break;
+ }
+ }
+
+ if (fevt) {
+ base_video_encoder->force_key_unit =
+ g_list_remove (base_video_encoder->force_key_unit, fevt);
}
GST_OBJECT_UNLOCK (base_video_encoder);
- gst_structure_set (gst_event_writable_structure (ev),
- "timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
- "stream-time", G_TYPE_UINT64, stream_time,
- "running-time", G_TYPE_UINT64, running_time, NULL);
+ if (fevt) {
+ stream_time =
+ gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
+ (base_video_encoder)->segment, GST_FORMAT_TIME,
+ frame->presentation_timestamp);
- gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), ev);
- }
+ ev = gst_video_event_new_downstream_force_key_unit
+ (frame->presentation_timestamp, stream_time, running_time,
+ fevt->all_headers, fevt->count);
- /* no buffer data means this frame is skipped/dropped */
- if (!frame->src_buffer) {
- GST_DEBUG_OBJECT (base_video_encoder, "skipping frame %" GST_TIME_FORMAT,
- GST_TIME_ARGS (frame->presentation_timestamp));
- goto done;
+ gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
+ ev);
+
+ if (fevt->all_headers) {
+ if (base_video_encoder->headers) {
+ headers = gst_buffer_ref (base_video_encoder->headers);
- headers = gst_buffer_make_metadata_writable (headers);
++ headers = gst_buffer_make_writable (headers);
+ }
+ }
+
+ GST_DEBUG_OBJECT (base_video_encoder,
+ "Forced key unit: running-time %" GST_TIME_FORMAT
+ ", all_headers %d, count %u",
+ GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
+ forced_key_unit_event_free (fevt);
+ }
}
if (frame->is_sync_point) {
GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp;
+ if (G_UNLIKELY (headers)) {
+ GST_BUFFER_TIMESTAMP (headers) = frame->presentation_timestamp;
+ GST_BUFFER_DURATION (headers) = 0;
+ GST_BUFFER_OFFSET (headers) = frame->decode_timestamp;
+ }
+
/* update rate estimate */
GST_BASE_VIDEO_CODEC (base_video_encoder)->bytes +=
- GST_BUFFER_SIZE (frame->src_buffer);
+ gst_buffer_get_size (frame->src_buffer);
if (GST_CLOCK_TIME_IS_VALID (frame->presentation_duration)) {
GST_BASE_VIDEO_CODEC (base_video_encoder)->time +=
frame->presentation_duration;
static gboolean gst_wave_scope_render (GstBaseAudioVisualizer * base,
GstBuffer * audio, GstBuffer * video);
-
-GST_BOILERPLATE (GstWaveScope, gst_wave_scope, GstBaseAudioVisualizer,
- GST_TYPE_BASE_AUDIO_VISUALIZER);
-
-static void
-gst_wave_scope_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Waveform oscilloscope",
- "Visualization",
- "Simple waveform oscilloscope", "Stefan Kost <ensonic@users.sf.net>");
-
- gst_element_class_add_static_pad_template (element_class,
- &gst_wave_scope_src_template);
- gst_element_class_add_static_pad_template (element_class,
- &gst_wave_scope_sink_template);
-}
++#define gst_wave_scope_parent_class parent_class
+G_DEFINE_TYPE (GstWaveScope, gst_wave_scope, GST_TYPE_BASE_AUDIO_VISUALIZER);
static void
gst_wave_scope_class_init (GstWaveScopeClass * g_class)
break;
}
- return TRUE;
+ return GST_PAD_PROBE_OK;
}
-static gboolean
-gst_camera_bin_audio_src_data_probe (GstPad * pad, GstMiniObject * obj,
+static GstPadProbeReturn
- gst_camera_bin_audio_src_event_probe (GstPad * pad, GstPadProbeInfo * info,
++gst_camera_bin_audio_src_data_probe (GstPad * pad, GstPadProbeInfo * info,
gpointer data)
{
GstCameraBin2 *camera = data;
- gboolean ret = TRUE;
+ gboolean ret = GST_PAD_PROBE_OK;
- GstEvent *event = GST_EVENT (info->data);
- if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
- /* we only let an EOS pass when the user is stopping a capture */
- if (camera->audio_drop_eos) {
- if (GST_IS_BUFFER (obj)) {
++ if (GST_IS_BUFFER (data)) {
+ if (G_UNLIKELY (camera->audio_send_newseg)) {
- GstBuffer *buf = GST_BUFFER_CAST (obj);
++ GstBuffer *buf = GST_BUFFER_CAST (data);
+ GstClockTime ts = GST_BUFFER_TIMESTAMP (buf);
+ GstPad *peer;
++ GstSegment segment;
+
+ if (!GST_CLOCK_TIME_IS_VALID (ts)) {
+ ts = 0;
+ }
+
+ peer = gst_pad_get_peer (pad);
+ g_return_val_if_fail (peer != NULL, TRUE);
+
- gst_pad_send_event (peer, gst_event_new_new_segment (FALSE, 1.0,
- GST_FORMAT_TIME, ts, -1, 0));
++ gst_segment_init (&segment, GST_FORMAT_TIME);
++ segment.start = ts;
++ gst_pad_send_event (peer, gst_event_new_segment (&segment));
+
+ gst_object_unref (peer);
+
+ camera->audio_send_newseg = FALSE;
+ }
+ } else {
- GstEvent *event = GST_EVENT_CAST (obj);
++ GstEvent *event = GST_EVENT_CAST (data);
+ if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+ /* we only let an EOS pass when the user is stopping a capture */
+ if (camera->audio_drop_eos) {
- ret = FALSE;
++ ret = GST_PAD_PROBE_DROP;
+ } else {
+ camera->audio_drop_eos = TRUE;
+ /* should already be false, but reinforce in case no buffers get
+ * pushed */
+ camera->audio_send_newseg = FALSE;
+ }
- } else if (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT) {
- ret = FALSE;
++ } else if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
+ ret = GST_PAD_PROBE_DROP;
- } else {
- camera->audio_drop_eos = TRUE;
}
}
GstEncodingContainerProfile *prof;
GstCaps *caps;
- caps = gst_caps_new_simple ("application/ogg", NULL, NULL);
- caps = gst_caps_new_simple ("application/ogg", NULL);
++ caps = gst_caps_new_empty_simple ("application/ogg");
prof = gst_encoding_container_profile_new ("ogg", "theora+vorbis+ogg",
caps, NULL);
gst_caps_unref (caps);
- caps = gst_caps_new_simple ("video/x-theora", NULL, NULL);
- caps = gst_caps_new_simple ("video/x-theora", NULL);
++ caps = gst_caps_new_empty_simple ("video/x-theora");
if (!gst_encoding_container_profile_add_profile (prof,
(GstEncodingProfile *) gst_encoding_video_profile_new (caps,
NULL, NULL, 1))) {
}
gst_caps_unref (caps);
- caps = gst_caps_new_simple ("audio/x-vorbis", NULL, NULL);
- caps = gst_caps_new_simple ("audio/x-vorbis", NULL);
++ caps = gst_caps_new_empty_simple ("audio/x-vorbis");
if (!gst_encoding_container_profile_add_profile (prof,
(GstEncodingProfile *) gst_encoding_audio_profile_new (caps,
NULL, NULL, 1))) {
GstEncodingVideoProfile *vprof;
GstCaps *caps;
- caps = gst_caps_new_simple ("image/jpeg", NULL, NULL);
- caps = gst_caps_new_simple ("image/jpeg", NULL);
++ caps = gst_caps_new_empty_simple ("image/jpeg");
vprof = gst_encoding_video_profile_new (caps, NULL, NULL, 1);
gst_encoding_video_profile_set_variableframerate (vprof, TRUE);
srcpad = gst_element_get_static_pad (camera->audio_src, "src");
- /* 1) drop EOS for audiosrc elements that push them on state_changes
- * (basesrc does this)
- * 2) Fix newsegment events to have start time = first buffer ts */
- gst_pad_add_data_probe (srcpad,
- (GCallback) gst_camera_bin_audio_src_data_probe, camera);
+ /* drop EOS for audiosrc elements that push them on state_changes
+ * (basesrc does this) */
- gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
- gst_camera_bin_audio_src_event_probe, gst_object_ref (camera),
++ gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM,
++ gst_camera_bin_audio_src_data_probe, gst_object_ref (camera),
+ gst_object_unref);
gst_object_unref (srcpad);
}
if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_DONE) {
/* NOP */
} else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_STARTING) {
+ GstClockTime ts;
++ GstSegment segment;
+
GST_DEBUG_OBJECT (self, "Starting video recording");
self->video_rec_status = GST_VIDEO_RECORDING_STATUS_RUNNING;
- gst_pad_push_event (self->vidsrc, gst_event_new_new_segment (FALSE, 1.0,
- GST_FORMAT_TIME, ts, -1, 0));
+ ts = GST_BUFFER_TIMESTAMP (buffer);
+ if (!GST_CLOCK_TIME_IS_VALID (ts))
+ ts = 0;
++ gst_segment_init (&segment, GST_FORMAT_TIME);
++ segment.start = ts;
++ gst_pad_push_event (self->vidsrc, gst_event_new_segment (&segment));
+
/* post preview */
GST_DEBUG_OBJECT (self, "Posting preview for video");
gst_base_camera_src_post_preview (camerasrc, buffer);
enum
{
- ARG_0
+ PROP_AGGREGATE_GOPS = 1
};
+ #define DEFAULT_AGGREGATE_GOPS FALSE
+
static GstStaticPadTemplate mpegpsmux_sink_factory =
- GST_STATIC_PAD_TEMPLATE ("sink_%d",
+ GST_STATIC_PAD_TEMPLATE ("sink_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS ("video/mpeg, "
GValue * value, GParamSpec * pspec);
static gboolean gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps);
-static GstCaps *gst_h264_parse_get_caps (GstBaseParse * parse);
-static GstFlowReturn gst_h264_parse_chain (GstPad * pad, GstBuffer * buffer);
+static GstCaps *gst_h264_parse_get_caps (GstBaseParse * parse,
+ GstCaps * filter);
+static GstFlowReturn gst_h264_parse_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+ static gboolean gst_h264_parse_event (GstBaseParse * parse, GstEvent * event);
+ static gboolean gst_h264_parse_src_event (GstBaseParse * parse,
+ GstEvent * event);
static void
-gst_h264_parse_base_init (gpointer g_class)
-{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);
- gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
-
- gst_element_class_set_details_simple (gstelement_class, "H.264 parser",
- "Codec/Parser/Converter/Video",
- "Parses H.264 streams",
- "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
-
- GST_DEBUG_CATEGORY_INIT (h264_parse_debug, "h264parse", 0, "h264 parser");
-}
-
-static void
gst_h264_parse_class_init (GstH264ParseClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
GST_DEBUG_FUNCPTR (gst_h264_parse_pre_push_frame);
parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_set_caps);
parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_get_caps);
+ parse_class->event = GST_DEBUG_FUNCPTR (gst_h264_parse_event);
+ parse_class->src_event = GST_DEBUG_FUNCPTR (gst_h264_parse_src_event);
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&srctemplate));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&sinktemplate));
+
+ gst_element_class_set_details_simple (gstelement_class, "H.264 parser",
+ "Codec/Parser/Converter/Video",
+ "Parses H.264 streams",
+ "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
}
static void
return res;
}
+ static gboolean
+ gst_h264_parse_event (GstBaseParse * parse, GstEvent * event)
+ {
+ gboolean handled = FALSE;
+ GstH264Parse *h264parse = GST_H264_PARSE (parse);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ {
+ GstClockTime timestamp, stream_time, running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (!gst_video_event_is_force_key_unit (event))
+ break;
+
+ gst_video_event_parse_downstream_force_key_unit (event,
+ &timestamp, &stream_time, &running_time, &all_headers, &count);
+
+ GST_INFO_OBJECT (h264parse, "received downstream force key unit event, "
+ "seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
+ gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
+ all_headers, count);
+ handled = TRUE;
+
+ if (h264parse->force_key_unit_event) {
+ GST_INFO_OBJECT (h264parse, "ignoring force key unit event "
+ "as one is already queued");
+ break;
+ }
+
+ h264parse->pending_key_unit_ts = running_time;
+ gst_event_replace (&h264parse->force_key_unit_event, event);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return handled;
+ }
+
+ static gboolean
+ gst_h264_parse_src_event (GstBaseParse * parse, GstEvent * event)
+ {
+ gboolean handled = FALSE;
+ GstH264Parse *h264parse = GST_H264_PARSE (parse);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ {
+ GstClockTime running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (!gst_video_event_is_force_key_unit (event))
+ break;
+
+ gst_video_event_parse_upstream_force_key_unit (event,
+ &running_time, &all_headers, &count);
+
+ GST_INFO_OBJECT (h264parse, "received upstream force-key-unit event, "
+ "seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
+ gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
+ all_headers, count);
+
+ if (!all_headers)
+ break;
+
+ h264parse->pending_key_unit_ts = running_time;
+ gst_event_replace (&h264parse->force_key_unit_event, event);
+ /* leave handled = FALSE so that the event gets propagated upstream */
+ break;
+ }
+ default:
+ break;
+ }
+
+ return handled;
+ }
+
static GstFlowReturn
-gst_h264_parse_chain (GstPad * pad, GstBuffer * buffer)
+gst_h264_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
- GstH264Parse *h264parse = GST_H264_PARSE (GST_PAD_PARENT (pad));
+ GstH264Parse *h264parse = GST_H264_PARSE (parent);
if (h264parse->packetized && buffer) {
GstBuffer *sub;
const GValue * value, GParamSpec * pspec);
static void gst_mpeg4vparse_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
+ static gboolean gst_mpeg4vparse_event (GstBaseParse * parse, GstEvent * event);
+ static gboolean gst_mpeg4vparse_src_event (GstBaseParse * parse,
+ GstEvent * event);
static void
-gst_mpeg4vparse_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_static_pad_template (element_class, &src_template);
- gst_element_class_add_static_pad_template (element_class, &sink_template);
-
- gst_element_class_set_details_simple (element_class,
- "MPEG 4 video elementary stream parser", "Codec/Parser/Video",
- "Parses MPEG-4 Part 2 elementary video streams",
- "Julien Moutte <julien@fluendo.com>");
-
- GST_DEBUG_CATEGORY_INIT (mpeg4v_parse_debug, "mpeg4videoparse", 0,
- "MPEG-4 video parser");
-}
-
-static void
gst_mpeg4vparse_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
gst_mpeg4vparse_class_init (GstMpeg4VParseClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
++ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
parent_class = g_type_class_peek_parent (klass);
0, 3600, DEFAULT_CONFIG_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- gst_element_class_add_pad_template (gstelement_class,
++ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
- gst_element_class_add_pad_template (gstelement_class,
++ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+
- gst_element_class_set_details_simple (gstelement_class,
++ gst_element_class_set_details_simple (element_class,
+ "MPEG 4 video elementary stream parser", "Codec/Parser/Video",
+ "Parses MPEG-4 Part 2 elementary video streams",
+ "Julien Moutte <julien@fluendo.com>");
+
++ GST_DEBUG_CATEGORY_INIT (mpeg4v_parse_debug, "mpeg4videoparse", 0,
++ "MPEG-4 video parser");
++
/* Override BaseParse vfuncs */
parse_class->start = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_stop);
}
static gboolean
- gst_mpeg4vparse_process_config (GstMpeg4VParse * mp4vparse, const guint8 * data,
- gsize size)
+ gst_mpeg4vparse_process_config (GstMpeg4VParse * mp4vparse,
+ const guint8 * data, guint offset, gsize size)
{
/* only do stuff if something new */
- if (mp4vparse->config && size == gst_buffer_get_size (mp4vparse->config) &&
- gst_buffer_memcmp (mp4vparse->config, 0, data, size) == 0)
- if (mp4vparse->config && size == GST_BUFFER_SIZE (mp4vparse->config) &&
- memcmp (GST_BUFFER_DATA (mp4vparse->config), data, size) == 0)
++ if (!gst_buffer_memcmp (mp4vparse->config, offset, data, size))
return TRUE;
- if (!gst_mpeg4_params_parse_config (&mp4vparse->params, data, size)) {
- GST_DEBUG_OBJECT (mp4vparse, "failed to parse config data (size %"
- G_GSSIZE_FORMAT ")", size);
+ if (mp4vparse->vol_offset < 0) {
+ GST_WARNING ("No video object Layer parsed in this frame, cannot accept "
+ "config");
return FALSE;
}
if (mp4vparse->config != NULL)
gst_buffer_unref (mp4vparse->config);
-- mp4vparse->config = gst_buffer_new_and_alloc (size);
- gst_buffer_fill (mp4vparse->config, 0, data, size);
-
- memcpy (GST_BUFFER_DATA (mp4vparse->config), data, size);
-
++ mp4vparse->config = gst_buffer_new_wrapped (g_memdup (data, size), size);
/* trigger src caps update */
mp4vparse->update_caps = TRUE;
gst_mpeg4vparse_check_valid_frame (GstBaseParse * parse,
GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
- GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
- GstBuffer *buf = frame->buffer;
- GstByteReader reader;
+ GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
+ GstMpeg4Packet packet;
- guint8 *data = GST_BUFFER_DATA (frame->buffer);
- guint size = GST_BUFFER_SIZE (frame->buffer);
++ guint8 *data = NULL;
++ gsize size;
gint off = 0;
- gboolean ret;
+ gboolean ret = FALSE;
- guint code;
- guint8 *data;
- gsize size;
+
- data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
- gst_byte_reader_init (&reader, data, size);
++ data = gst_buffer_map (frame->buffer, &size, NULL, GST_MAP_READ);
retry:
/* at least start code and subsequent byte */
if (G_UNLIKELY (size - off < 5))
- goto done;
- return FALSE;
++ goto out;
/* avoid stale cached parsing state */
if (!(frame->flags & GST_BASE_PARSE_FRAME_FLAG_PARSING)) {
goto next;
}
- off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffff00, 0x00000100,
- off, size - off);
-
- GST_LOG_OBJECT (mp4vparse, "possible sync at buffer offset %d", off);
-
/* didn't find anything that looks like a sync word, skip */
- if (G_UNLIKELY (off < 0)) {
- *skipsize = size - 3;
- goto done;
+ switch (gst_mpeg4_parse (&packet, TRUE, NULL, data, off, size)) {
+ case (GST_MPEG4_PARSER_NO_PACKET):
+ case (GST_MPEG4_PARSER_ERROR):
+ *skipsize = size - 3;
- return FALSE;
++ goto out;
+ default:
+ break;
}
+ off = packet.offset;
/* possible frame header, but not at offset 0? skip bytes before sync */
- if (G_UNLIKELY (off > 0)) {
- *skipsize = off;
- goto done;
+ if (G_UNLIKELY (off > 3)) {
+ *skipsize = off - 3;
- return FALSE;
++ goto out;
}
- /* ensure start code looks like a real starting start code */
- code = data[3];
- switch (code) {
- case MPEG4_VOP_STARTCODE:
- case MPEG4_VOS_STARTCODE:
- case MPEG4_GOP_STARTCODE:
+ switch (packet.type) {
+ case GST_MPEG4_GROUP_OF_VOP:
+ case GST_MPEG4_VISUAL_OBJ_SEQ_START:
+ case GST_MPEG4_VIDEO_OBJ_PLANE:
break;
default:
- if (code <= 0x1f)
+ if (packet.type <= GST_MPEG4_VIDEO_OBJ_LAST)
break;
/* undesirable sc */
GST_LOG_OBJECT (mp4vparse, "start code is no VOS, VO, VOP or GOP");
*skipsize = 0;
/* position a bit further than last sc */
off++;
- /* so now we have start code at start of data; locate next start code */
- off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffff00, 0x00000100,
- off, size - off);
-
- GST_LOG_OBJECT (mp4vparse, "next start code at %d", off);
- if (off < 0) {
- /* if draining, take all */
- if (GST_BASE_PARSE_DRAINING (parse)) {
- off = size;
- ret = TRUE;
- } else {
- /* resume scan where we left it */
- mp4vparse->last_sc = size - 4;
- /* request best next available */
- *framesize = G_MAXUINT;
- goto done;
- }
- } else {
- /* decide whether this startcode ends a frame */
- ret = gst_mpeg4vparse_process_sc (mp4vparse, buf, off);
+
+ /* so now we have start code at start of data; locate next packet */
+ switch (gst_mpeg4_parse (&packet, TRUE, NULL, data, off, size)) {
+ case (GST_MPEG4_PARSER_NO_PACKET_END):
+ ret = gst_mpeg4vparse_process_sc (mp4vparse, &packet, size);
+ if (ret)
+ break;
+ case (GST_MPEG4_PARSER_NO_PACKET):
+ case (GST_MPEG4_PARSER_ERROR):
+ /* if draining, take all */
+ if (GST_BASE_PARSE_DRAINING (parse)) {
+ *framesize = size;
- return TRUE;
++ ret = TRUE;
+ } else {
+ /* resume scan where we left it */
+ mp4vparse->last_sc = size - 3;
+ /* request best next available */
+ *framesize = G_MAXUINT;
- return FALSE;
+ }
++ goto out;
++ break;
+ default:
+ /* decide whether this startcode ends a frame */
+ ret = gst_mpeg4vparse_process_sc (mp4vparse, &packet, size);
+ break;
}
+ off = packet.offset;
+
if (ret) {
- *framesize = off;
+ *framesize = off - 3;
} else {
goto next;
}
- done:
- gst_buffer_unmap (buf, data, size);
-
++out:
++ gst_buffer_unmap (frame->buffer, data, size);
return ret;
}
{
GstCaps *caps = NULL;
+ GST_LOG_OBJECT (mp4vparse, "Updating caps");
+
/* only update if no src caps yet or explicitly triggered */
- if (G_LIKELY (GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (mp4vparse)) &&
+ if (G_LIKELY (gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (mp4vparse)) &&
!mp4vparse->update_caps))
return;
/* carry over input caps as much as possible; override with our own stuff */
- caps = GST_PAD_CAPS (GST_BASE_PARSE_SINK_PAD (mp4vparse));
+ caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (mp4vparse));
if (caps) {
- caps = gst_caps_make_writable (caps);
- caps = gst_caps_copy (caps);
++ GstCaps *tmp = gst_caps_copy (caps);
++ gst_caps_unref (caps);
++ caps = tmp;
} else {
caps = gst_caps_new_simple ("video/mpeg",
"mpegversion", G_TYPE_INT, 4, NULL);
GST_LOG_OBJECT (mp4vparse,
"interval since last config %" GST_TIME_FORMAT, GST_TIME_ARGS (diff));
- if (GST_TIME_AS_SECONDS (diff) >= mp4vparse->interval) {
+ if (GST_TIME_AS_SECONDS (diff) >= mp4vparse->interval || push_codec) {
++ guint8 *cdata;
++ gsize csize;
++ gboolean diffconf;
++
/* we need to send config now first */
- GST_LOG_OBJECT (parse, "inserting config in stream");
+ GST_INFO_OBJECT (parse, "inserting config in stream");
++ cdata = gst_buffer_map (mp4vparse->config, &csize, NULL, GST_MAP_READ);
++ diffconf = (gst_buffer_get_size (buffer) < csize)
++ || gst_buffer_memcmp (buffer, 0, cdata, csize);
++ gst_buffer_unmap (mp4vparse->config, cdata, csize);
/* avoid inserting duplicate config */
- if ((gst_buffer_get_size (buffer) <
- gst_buffer_get_size (mp4vparse->config))
- || compare_buffers (buffer, mp4vparse->config)) {
- if ((GST_BUFFER_SIZE (buffer) < GST_BUFFER_SIZE (mp4vparse->config)) ||
- memcmp (GST_BUFFER_DATA (buffer),
- GST_BUFFER_DATA (mp4vparse->config),
- GST_BUFFER_SIZE (mp4vparse->config))) {
++ if (diffconf) {
GstBuffer *superbuf;
/* insert header */
superbuf = gst_buffer_merge (mp4vparse->config, buffer);
- gst_buffer_copy_into (superbuf, buffer, GST_BUFFER_COPY_ALL, 0, -1);
- gst_buffer_copy_metadata (superbuf, buffer, GST_BUFFER_COPY_ALL);
++ gst_buffer_copy_into (superbuf, buffer, GST_BUFFER_COPY_METADATA, 0,
++ csize);
gst_buffer_replace (&frame->buffer, superbuf);
gst_buffer_unref (superbuf);
} else {
/* best possible parse attempt,
* src caps are based on sink caps so it will end up in there
* whether sucessful or not */
- gst_mpeg4vparse_process_config (mp4vparse, data, size);
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
++ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ res = gst_mpeg4_parse (&packet, TRUE, NULL, data, 0, size);
+
+ while (res == GST_MPEG4_PARSER_OK || res == GST_MPEG4_PARSER_NO_PACKET_END) {
+
+ if (packet.type >= GST_MPEG4_VIDEO_LAYER_FIRST &&
+ packet.type <= GST_MPEG4_VIDEO_LAYER_LAST)
+ mp4vparse->vol_offset = packet.offset;
+
+ res = gst_mpeg4_parse (&packet, TRUE, NULL, data, packet.offset, size);
+ }
+
+ /* And take it as config */
- gst_mpeg4vparse_process_config (mp4vparse, GST_BUFFER_DATA (buf),
- 3, GST_BUFFER_SIZE (buf));
++ gst_mpeg4vparse_process_config (mp4vparse, data, 3, size);
+ gst_buffer_unmap (buf, data, size);
}
/* let's not interfere and accept regardless of config parsing success */
return TRUE;
}
+
static GstCaps *
-gst_mpeg4vparse_get_caps (GstBaseParse * parse)
+gst_mpeg4vparse_get_caps (GstBaseParse * parse, GstCaps * filter)
{
GstCaps *peercaps;
GstCaps *res;
(parse)));
}
++ if (filter) {
++ GstCaps *tmp = gst_caps_intersect_full (res, filter,
++ GST_CAPS_INTERSECT_FIRST);
++ gst_caps_unref (res);
++ res = tmp;
++ }
++
++
return res;
}
for (j = 0; j < gst_caps_get_size (caps); j++) {
GstCaps *in_caps, *out_caps;
GstStructure *s;
-- guint32 fourcc;
++ const gchar *fourcc;
in_caps = gst_caps_copy_nth (caps, i);
out_caps = gst_caps_copy_nth (caps, j);
/* FIXME remove if videotestsrc and video format handle these properly */
s = gst_caps_get_structure (in_caps, 0);
-- if (gst_structure_get_fourcc (s, "format", &fourcc)) {
-- if (fourcc == GST_MAKE_FOURCC ('Y', 'U', 'V', '9') ||
-- fourcc == GST_MAKE_FOURCC ('Y', 'V', 'U', '9') ||
-- fourcc == GST_MAKE_FOURCC ('v', '2', '1', '6')) {
++ if ((fourcc = gst_structure_get_string (s, "format"))) {
++ if (!strcmp (fourcc, "YUV9") ||
++ !strcmp (fourcc, "YVU9") || !strcmp (fourcc, "v216")) {
gst_caps_unref (in_caps);
gst_caps_unref (out_caps);
continue;