libgstrtp_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstrtp_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
-lgstaudio-@GST_MAJORMINOR@ \
+ -lgstvideo-@GST_MAJORMINOR@ \
-lgsttag-@GST_MAJORMINOR@ \
-lgstrtp-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) $(GST_LIBS) \
guint16 length; /* Length */
} GstAsteriskH263Header;
-#define GST_ASTERISKH263_HEADER_TIMESTAMP(buf) (((GstAsteriskH263Header *)(GST_BUFFER_DATA (buf)))->timestamp)
-#define GST_ASTERISKH263_HEADER_LENGTH(buf) (((GstAsteriskH263Header *)(GST_BUFFER_DATA (buf)))->length)
+#define GST_ASTERISKH263_HEADER_TIMESTAMP(data) (((GstAsteriskH263Header *)(data))->timestamp)
+#define GST_ASTERISKH263_HEADER_LENGTH(data) (((GstAsteriskH263Header *)(data))->length)
static GstStaticPadTemplate gst_asteriskh263_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
static GstStateChangeReturn gst_asteriskh263_change_state (GstElement *
element, GstStateChange transition);
-GST_BOILERPLATE (GstAsteriskh263, gst_asteriskh263, GstElement,
- GST_TYPE_ELEMENT);
-
-static void
-gst_asteriskh263_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_asteriskh263_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_asteriskh263_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP Asterisk H263 depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts H263 video from RTP and encodes in Asterisk H263 format",
- "Neil Stratford <neils@vipadia.com>");
-}
+#define gst_asteriskh263_parent_class parent_class
+G_DEFINE_TYPE (GstAsteriskh263, gst_asteriskh263, GST_TYPE_ELEMENT);
static void
gst_asteriskh263_class_init (GstAsteriskh263Class * klass)
gobject_class->finalize = gst_asteriskh263_finalize;
gstelement_class->change_state = gst_asteriskh263_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_asteriskh263_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_asteriskh263_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Asterisk H263 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H263 video from RTP and encodes in Asterisk H263 format",
+ "Neil Stratford <neils@vipadia.com>");
}
static void
-gst_asteriskh263_init (GstAsteriskh263 * asteriskh263,
- GstAsteriskh263Class * klass)
+gst_asteriskh263_init (GstAsteriskh263 * asteriskh263)
{
asteriskh263->srcpad =
gst_pad_new_from_static_template (&gst_asteriskh263_src_template, "src");
guint32 timestamp;
guint32 samples;
guint16 asterisk_len;
+ GstRTPBuffer rtp;
+ guint8 *data;
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
- M = gst_rtp_buffer_get_marker (buf);
- timestamp = gst_rtp_buffer_get_timestamp (buf);
+ M = gst_rtp_buffer_get_marker (&rtp);
+ timestamp = gst_rtp_buffer_get_timestamp (&rtp);
+
+ gst_rtp_buffer_unmap (&rtp);
outbuf = gst_buffer_new_and_alloc (payload_len +
GST_ASTERISKH263_HEADER_LEN);
samples = timestamp - asteriskh263->lastts;
asteriskh263->lastts = timestamp;
- GST_ASTERISKH263_HEADER_TIMESTAMP (outbuf) = g_htonl (samples);
- GST_ASTERISKH263_HEADER_LENGTH (outbuf) = g_htons (asterisk_len);
+ data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
+ GST_ASTERISKH263_HEADER_TIMESTAMP (data) = g_htonl (samples);
+ GST_ASTERISKH263_HEADER_LENGTH (data) = g_htons (asterisk_len);
/* copy the data into place */
- memcpy (GST_BUFFER_DATA (outbuf) + GST_ASTERISKH263_HEADER_LEN, payload,
- payload_len);
+ memcpy (data + GST_ASTERISKH263_HEADER_LEN, payload, payload_len);
+
+ gst_buffer_unmap (outbuf, data, -1);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
- gst_buffer_set_caps (outbuf,
- (GstCaps *) gst_pad_get_pad_template_caps (asteriskh263->srcpad));
+ if (!gst_pad_has_current_caps (asteriskh263->srcpad)) {
+ GstCaps *caps;
+
+ caps = gst_caps_copy
+ (gst_pad_get_pad_template_caps (asteriskh263->srcpad));
+ gst_pad_set_caps (asteriskh263->srcpad, caps);
+ gst_caps_unref (caps);
+ }
ret = gst_pad_push (asteriskh263->srcpad, outbuf);
)
);
-GST_BOILERPLATE (GstRtpL16Depay, gst_rtp_L16_depay, GstBaseRTPDepayload,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+#define gst_rtp_L16_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpL16Depay, gst_rtp_L16_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_L16_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
GstBuffer * buf);
static void
-gst_rtp_L16_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_L16_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_L16_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP audio depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts raw audio from RTP packets",
- "Zeeshan Ali <zak147@yahoo.com>," "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_L16_depay_class_init (GstRtpL16DepayClass * klass)
{
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->set_caps = gst_rtp_L16_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_L16_depay_process;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L16_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L16_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts raw audio from RTP packets",
+ "Zeeshan Ali <zak147@yahoo.com>," "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpL16depay_debug, "rtpL16depay", 0,
"Raw Audio RTP Depayloader");
}
static void
-gst_rtp_L16_depay_init (GstRtpL16Depay * rtpL16depay,
- GstRtpL16DepayClass * klass)
+gst_rtp_L16_depay_init (GstRtpL16Depay * rtpL16depay)
{
/* needed because of GST_BOILERPLATE */
}
GstBuffer *outbuf;
gint payload_len;
gboolean marker;
+ GstRTPBuffer rtp;
rtpL16depay = GST_RTP_L16_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 0)
goto empty_packet;
GST_DEBUG_OBJECT (rtpL16depay, "got payload of %d bytes", payload_len);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
- marker = gst_rtp_buffer_get_marker (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
if (marker) {
/* mark talk spurt with DISCONT */
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
+ gst_rtp_buffer_unmap (&rtp);
+
return outbuf;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpL16depay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static gboolean gst_rtp_L16_pay_setcaps (GstBaseRTPPayload * basepayload,
GstCaps * caps);
static GstCaps *gst_rtp_L16_pay_getcaps (GstBaseRTPPayload * rtppayload,
- GstPad * pad);
+ GstPad * pad, GstCaps * filter);
-GST_BOILERPLATE (GstRtpL16Pay, gst_rtp_L16_pay, GstBaseRTPAudioPayload,
- GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
-
-static void
-gst_rtp_L16_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_L16_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_L16_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP audio payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encode Raw audio into RTP packets (RFC 3551)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_L16_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpL16Pay, gst_rtp_L16_pay, GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
gst_rtp_L16_pay_class_init (GstRtpL16PayClass * klass)
{
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_L16_pay_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_L16_pay_getcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L16_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L16_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP audio payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encode Raw audio into RTP packets (RFC 3551)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpL16pay_debug, "rtpL16pay", 0,
"L16 RTP Payloader");
}
static void
-gst_rtp_L16_pay_init (GstRtpL16Pay * rtpL16pay, GstRtpL16PayClass * klass)
+gst_rtp_L16_pay_init (GstRtpL16Pay * rtpL16pay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
}
static GstCaps *
-gst_rtp_L16_pay_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
+gst_rtp_L16_pay_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
}
gst_caps_unref (otherpadcaps);
}
+
+ if (filter) {
+ GstCaps *tcaps = caps;
+
+ caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tcaps);
+ }
+
return caps;
}
} else {
if (n > rest_bits) {
context->window =
- (context->window << rest_bits) | (*context->
- win_end & (((guint) pow (2.0, (double) rest_bits)) - 1));
+ (context->
+ window << rest_bits) | (*context->win_end & (((guint) pow (2.0,
+ (double) rest_bits)) - 1));
n -= rest_bits;
rest_bits = 0;
} else {
gst_rtp_h263_pay_boundry_init (&bound, NULL, rtph263pay->data - 1, 0, 0);
context->gobs =
- (GstRtpH263PayGob **) g_malloc0 (format_props[context->piclayer->
- ptype_srcformat][0] * sizeof (GstRtpH263PayGob *));
+ (GstRtpH263PayGob **) g_malloc0 (format_props[context->
+ piclayer->ptype_srcformat][0] * sizeof (GstRtpH263PayGob *));
for (i = 0; i < format_props[context->piclayer->ptype_srcformat][0]; i++) {
guint type, width, height;
guint16 dri, precision, length;
guint8 *qtable;
+ GstRTPBuffer rtp;
rtpjpegdepay = GST_RTP_JPEG_DEPAY (depayload);
rtpjpegdepay->discont = TRUE;
}
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 8)
goto empty_packet;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
header_len = 0;
/* 0 1 2 3
if (frag_offset == 0) {
guint size;
+ guint8 *data;
if (rtpjpegdepay->width != width || rtpjpegdepay->height != height) {
GstCaps *outcaps;
}
/* max header length, should be big enough */
outbuf = gst_buffer_new_and_alloc (1000);
- size = MakeHeaders (GST_BUFFER_DATA (outbuf), type,
- width, height, qtable, precision, dri);
-
- GST_DEBUG_OBJECT (rtpjpegdepay, "pushing %u bytes of header", size);
+ data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
+ size = MakeHeaders (data, type, width, height, qtable, precision, dri);
+ gst_buffer_unmap (outbuf, data, size);
- GST_BUFFER_SIZE (outbuf) = size;
+  GST_DEBUG_OBJECT (rtpjpegdepay, "pushing %u bytes of header", size);
gst_adapter_push (rtpjpegdepay->adapter, outbuf);
}
/* take JPEG data, push in the adapter */
GST_DEBUG_OBJECT (rtpjpegdepay, "pushing data at offset %d", header_len);
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, header_len, -1);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, header_len, -1);
gst_adapter_push (rtpjpegdepay->adapter, outbuf);
outbuf = NULL;
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
guint8 end[2];
guint8 *data;
/* no EOI marker, add one */
outbuf = gst_buffer_new_and_alloc (2);
- data = GST_BUFFER_DATA (outbuf);
+ data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
data[0] = 0xff;
data[1] = 0xd9;
+ gst_buffer_unmap (outbuf, data, -1);
gst_adapter_push (rtpjpegdepay->adapter, outbuf);
avail += 2;
GST_DEBUG_OBJECT (rtpjpegdepay, "returning %u bytes", avail);
}
+ gst_rtp_buffer_unmap (&rtp);
+
return outbuf;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, DECODE,
("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_dimension:
{
GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, FORMAT,
("Invalid Dimension %dx%d.", width, height), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
no_qtable:
{
GST_WARNING_OBJECT (rtpjpegdepay, "no qtable");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstFlowReturn gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * pad,
GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpJPEGPay, gst_rtp_jpeg_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_jpeg_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_jpeg_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_jpeg_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP JPEG payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes JPEG pictures into RTP packets (RFC 2435)",
- "Axis Communications <dev-gstreamer@axis.com>");
-}
+#define gst_rtp_jpeg_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJPEGPay, gst_rtp_jpeg_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_jpeg_pay_class_init (GstRtpJPEGPayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->set_property = gst_rtp_jpeg_pay_set_property;
gobject_class->get_property = gst_rtp_jpeg_pay_get_property;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jpeg_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_jpeg_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class, "RTP JPEG payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes JPEG pictures into RTP packets (RFC 2435)",
+ "Axis Communications <dev-gstreamer@axis.com>");
+
gstbasertppayload_class->set_caps = gst_rtp_jpeg_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_jpeg_pay_handle_buffer;
}
static void
-gst_rtp_jpeg_pay_init (GstRtpJPEGPay * pay, GstRtpJPEGPayClass * klass)
+gst_rtp_jpeg_pay_init (GstRtpJPEGPay * pay)
{
pay->quality = DEFAULT_JPEG_QUALITY;
pay->quant = DEFAULT_JPEG_QUANT;
RtpQuantTable tables[15] = { {0, NULL}, };
CompInfo info[3] = { {0,}, };
guint quant_data_size;
- guint8 *data;
- guint size;
+ guint8 *data, *bdata;
+ gsize size;
guint mtu;
guint bytes_left;
guint jpeg_header_size = 0;
gboolean sos_found, sof_found, dqt_found, dri_found;
gint i;
GstBufferList *list = NULL;
- GstBufferListIterator *it = NULL;
pay = GST_RTP_JPEG_PAY (basepayload);
mtu = GST_BASE_RTP_PAYLOAD_MTU (pay);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = bdata = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
offset = 0;
- GST_LOG_OBJECT (pay, "got buffer size %u, timestamp %" GST_TIME_FORMAT, size,
- GST_TIME_ARGS (timestamp));
+ GST_LOG_OBJECT (pay, "got buffer size %" G_GSIZE_FORMAT
+ " , timestamp %" GST_TIME_FORMAT, size, GST_TIME_ARGS (timestamp));
/* parse the jpeg header for 'start of scan' and read quant tables if needed */
sos_found = FALSE;
if (pay->buffer_list) {
list = gst_buffer_list_new ();
- it = gst_buffer_list_iterate (list);
}
bytes_left = sizeof (jpeg_header) + quant_data_size + size;
GstBuffer *outbuf;
guint8 *payload;
guint payload_size = (bytes_left < mtu ? bytes_left : mtu);
+ guint header_size;
+ GstBuffer *paybuf;
+ GstRTPBuffer rtp;
- if (pay->buffer_list) {
- guint header_size;
+ header_size = sizeof (jpeg_header) + quant_data_size;
+ if (dri_found)
+ header_size += sizeof (restart_marker_header);
- header_size = sizeof (jpeg_header) + quant_data_size;
- if (dri_found)
- header_size += sizeof (restart_marker_header);
+ outbuf = gst_rtp_buffer_new_allocate (header_size, 0, 0);
- outbuf = gst_rtp_buffer_new_allocate (header_size, 0, 0);
- } else {
- outbuf = gst_rtp_buffer_new_allocate (payload_size, 0, 0);
- }
- GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
if (payload_size == bytes_left) {
GST_LOG_OBJECT (pay, "last packet of frame");
frame_done = TRUE;
- gst_rtp_buffer_set_marker (outbuf, 1);
+ gst_rtp_buffer_set_marker (&rtp, 1);
}
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* update offset */
#if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
quant_data_size = 0;
}
GST_LOG_OBJECT (pay, "sending payload size %d", payload_size);
+ gst_rtp_buffer_unmap (&rtp);
- if (pay->buffer_list) {
- GstBuffer *paybuf;
+ /* create a new buf to hold the payload */
+ paybuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
+ jpeg_header_size + offset, payload_size);
- /* create a new buf to hold the payload */
- paybuf = gst_buffer_create_sub (buffer, jpeg_header_size + offset,
- payload_size);
+ /* join memory parts */
+ outbuf = gst_buffer_join (outbuf, paybuf);
- /* create a new group to hold the rtp header and the payload */
- gst_buffer_list_iterator_add_group (it);
- gst_buffer_list_iterator_add (it, outbuf);
- gst_buffer_list_iterator_add (it, paybuf);
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+
+ if (pay->buffer_list) {
+ /* and add to list */
+ gst_buffer_list_insert (list, -1, outbuf);
} else {
- memcpy (payload, data, payload_size);
ret = gst_basertppayload_push (basepayload, outbuf);
if (ret != GST_FLOW_OK)
break;
while (!frame_done);
if (pay->buffer_list) {
- gst_buffer_list_iterator_free (it);
/* push the whole buffer list at once */
ret = gst_basertppayload_push_list (basepayload, list);
}
+ gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return ret;
unsupported_jpeg:
{
GST_ELEMENT_ERROR (pay, STREAM, FORMAT, ("Unsupported JPEG"), (NULL));
+ gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_SUPPORTED;
}
no_dimension:
{
GST_ELEMENT_ERROR (pay, STREAM, FORMAT, ("No size given"), (NULL));
+ gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_NEGOTIATED;
}
invalid_format:
{
/* error was posted */
+ gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
invalid_quant:
{
GST_ELEMENT_ERROR (pay, STREAM, FORMAT, ("Invalid quant tables"), (NULL));
+ gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
"clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"MP1S\"")
);
-GST_BOILERPLATE (GstRtpMP1SDepay, gst_rtp_mp1s_depay, GstBaseRTPDepayload,
+G_DEFINE_TYPE (GstRtpMP1SDepay, gst_rtp_mp1s_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_mp1s_depay_setcaps (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
static void
-gst_rtp_mp1s_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp1s_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp1s_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG1 System Stream depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG1 System Streams from RTP packets (RFC 3555)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_mp1s_depay_class_init (GstRtpMP1SDepayClass * klass)
{
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_mp1s_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_mp1s_depay_setcaps;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp1s_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp1s_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG1 System Stream depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG1 System Streams from RTP packets (RFC 3555)",
+ "Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_rtp_mp1s_depay_init (GstRtpMP1SDepay * rtpmp1sdepay,
- GstRtpMP1SDepayClass * klass)
+gst_rtp_mp1s_depay_init (GstRtpMP1SDepay * rtpmp1sdepay)
{
}
gst_rtp_mp1s_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf;
+ GstRTPBuffer rtp;
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf)
GST_DEBUG ("gst_rtp_mp1s_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+        (gint) gst_buffer_get_size (outbuf));
return outbuf;
}
"clock-rate = (int) [1, MAX ]")
);
-GST_BOILERPLATE (GstRtpMP2TDepay, gst_rtp_mp2t_depay, GstBaseRTPDepayload,
+G_DEFINE_TYPE (GstRtpMP2TDepay, gst_rtp_mp2t_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_mp2t_depay_setcaps (GstBaseRTPDepayload * depayload,
GValue * value, GParamSpec * pspec);
static void
-gst_rtp_mp2t_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp2t_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp2t_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG Transport Stream depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG2 TS from RTP packets (RFC 2250)",
- "Wim Taymans <wim.taymans@gmail.com>, "
- "Thijs Vermeir <thijs.vermeir@barco.com>");
-}
-
-static void
gst_rtp_mp2t_depay_class_init (GstRtpMP2TDepayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gobject_class = (GObjectClass *) klass;
-
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_mp2t_depay_process;
gobject_class->set_property = gst_rtp_mp2t_depay_set_property;
gobject_class->get_property = gst_rtp_mp2t_depay_get_property;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp2t_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp2t_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG Transport Stream depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG2 TS from RTP packets (RFC 2250)",
+ "Wim Taymans <wim.taymans@gmail.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>");
+
g_object_class_install_property (gobject_class, PROP_SKIP_FIRST_BYTES,
g_param_spec_uint ("skip-first-bytes",
"Skip first bytes",
}
static void
-gst_rtp_mp2t_depay_init (GstRtpMP2TDepay * rtpmp2tdepay,
- GstRtpMP2TDepayClass * klass)
+gst_rtp_mp2t_depay_init (GstRtpMP2TDepay * rtpmp2tdepay)
{
rtpmp2tdepay->skip_first_bytes = DEFAULT_SKIP_FIRST_BYTES;
}
GstRtpMP2TDepay *rtpmp2tdepay;
GstBuffer *outbuf;
gint payload_len;
+ GstRTPBuffer rtp;
rtpmp2tdepay = GST_RTP_MP2T_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (G_UNLIKELY (payload_len <= rtpmp2tdepay->skip_first_bytes))
goto empty_packet;
- outbuf =
- gst_rtp_buffer_get_payload_subbuffer (buf, rtpmp2tdepay->skip_first_bytes,
- -1);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp,
+ rtpmp2tdepay->skip_first_bytes, -1);
+
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf)
GST_DEBUG ("gst_rtp_mp2t_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+        (gint) gst_buffer_get_size (outbuf));
return outbuf;
{
GST_ELEMENT_WARNING (rtpmp2tdepay, STREAM, DECODE,
(NULL), ("Packet was empty"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstFlowReturn gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay);
static void gst_rtp_mp2t_pay_finalize (GObject * object);
-GST_BOILERPLATE (GstRTPMP2TPay, gst_rtp_mp2t_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_mp2t_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp2t_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp2t_pay_src_template));
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG2 Transport Stream payloader", "Codec/Payloader/Network/RTP",
- "Payload-encodes MPEG2 TS into RTP packets (RFC 2250)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
+#define gst_rtp_mp2t_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPMP2TPay, gst_rtp_mp2t_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_mp2t_pay_class_init (GstRTPMP2TPayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->finalize = gst_rtp_mp2t_pay_finalize;
gstbasertppayload_class->set_caps = gst_rtp_mp2t_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_mp2t_pay_handle_buffer;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp2t_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp2t_pay_src_template));
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG2 Transport Stream payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes MPEG2 TS into RTP packets (RFC 2250)",
+ "Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_rtp_mp2t_pay_init (GstRTPMP2TPay * rtpmp2tpay, GstRTPMP2TPayClass * klass)
+gst_rtp_mp2t_pay_init (GstRTPMP2TPay * rtpmp2tpay)
{
GST_BASE_RTP_PAYLOAD (rtpmp2tpay)->clock_rate = 90000;
GST_BASE_RTP_PAYLOAD_PT (rtpmp2tpay) = GST_RTP_PAYLOAD_MP2T;
guint8 *payload;
GstFlowReturn ret;
GstBuffer *outbuf;
+ GstRTPBuffer rtp;
avail = gst_adapter_available (rtpmp2tpay->adapter);
outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);
/* get payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* copy stuff from adapter to payload */
gst_adapter_copy (rtpmp2tpay->adapter, payload, 0, avail);
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmp2tpay->first_ts;
GST_BUFFER_DURATION (outbuf) = rtpmp2tpay->duration;
GST_DEBUG_OBJECT (rtpmp2tpay, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+      (gint) gst_buffer_get_size (outbuf));
ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp2tpay), outbuf);
rtpmp2tpay = GST_RTP_MP2T_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
)
);
-GST_BOILERPLATE (GstRtpMP4ADepay, gst_rtp_mp4a_depay, GstBaseRTPDepayload,
+#define gst_rtp_mp4a_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4ADepay, gst_rtp_mp4a_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_mp4a_depay_finalize (GObject * object);
static void
-gst_rtp_mp4a_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4a_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4a_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG4 audio depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG4 audio from RTP packets (RFC 3016)",
- "Nokia Corporation (contact <stefan.kost@nokia.com>), "
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_mp4a_depay_class_init (GstRtpMP4ADepayClass * klass)
{
GObjectClass *gobject_class;
gstbasertpdepayload_class->process = gst_rtp_mp4a_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_mp4a_depay_setcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4a_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4a_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG4 audio from RTP packets (RFC 3016)",
+ "Nokia Corporation (contact <stefan.kost@nokia.com>), "
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpmp4adepay_debug, "rtpmp4adepay", 0,
"MPEG4 audio RTP Depayloader");
}
static void
-gst_rtp_mp4a_depay_init (GstRtpMP4ADepay * rtpmp4adepay,
- GstRtpMP4ADepayClass * klass)
+gst_rtp_mp4a_depay_init (GstRtpMP4ADepay * rtpmp4adepay)
{
rtpmp4adepay->adapter = gst_adapter_new ();
}
if (gst_value_deserialize (&v, str)) {
GstBuffer *buffer;
guint8 *data;
- guint size;
+ gsize size;
gint i;
guint32 rate = 0;
guint8 obj_type = 0, sr_idx = 0, channels = 0;
gst_buffer_ref (buffer);
g_value_unset (&v);
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size < 2) {
- GST_WARNING_OBJECT (depayload, "config too short (%d < 2)", size);
+ GST_WARNING_OBJECT (depayload, "config too short (%d < 2)",
+ (gint) size);
goto bad_config;
}
for (i = 0; i < size; i++) {
data[i] = ((data[i + 1] & 1) << 7) | ((data[i + 2] & 0xfe) >> 1);
}
- /* ignore remaining bit, we're only interested in full bytes */
- GST_BUFFER_SIZE (buffer) = size;
gst_bit_reader_init (&br, data, size);
break;
}
+ /* ignore remaining bit, we're only interested in full bytes */
+ gst_buffer_unmap (buffer, data, size);
+ data = NULL;
+
gst_caps_set_simple (srccaps,
"channels", G_TYPE_INT, (gint) channels,
"rate", G_TYPE_INT, (gint) rate,
"codec_data", GST_TYPE_BUFFER, buffer, NULL);
+ bad_config:
+ if (data)
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
} else {
g_warning ("cannot convert config to buffer");
}
}
-bad_config:
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
{
GstRtpMP4ADepay *rtpmp4adepay;
GstBuffer *outbuf;
+ GstRTPBuffer rtp;
+ guint8 *bdata;
rtpmp4adepay = GST_RTP_MP4A_DEPAY (depayload);
gst_adapter_clear (rtpmp4adepay->adapter);
}
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
- gst_buffer_copy_metadata (outbuf, buf, GST_BUFFER_COPY_TIMESTAMPS);
+ outbuf = gst_buffer_make_writable (outbuf);
+ GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
gst_adapter_push (rtpmp4adepay->adapter, outbuf);
/* RTP marker bit indicates the last packet of the AudioMuxElement => create
* and push a buffer */
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
guint i;
guint8 *data;
GST_LOG_OBJECT (rtpmp4adepay, "have marker and %u available", avail);
outbuf = gst_adapter_take_buffer (rtpmp4adepay->adapter, avail);
- data = GST_BUFFER_DATA (outbuf);
+ data = bdata = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_READ);
/* position in data we are at */
pos = 0;
/* take data out, skip the header */
pos += skip;
- tmp = gst_buffer_create_sub (outbuf, pos, data_len);
+ tmp = gst_buffer_copy_region (outbuf, GST_BUFFER_COPY_MEMORY, pos,
+ data_len);
/* skip data too */
skip += data_len;
"possible wrongly encoded packet."));
}
+ gst_buffer_unmap (outbuf, bdata, -1);
gst_buffer_unref (outbuf);
}
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpmp4adepay, STREAM, DECODE,
("Packet did not validate"), ("wrong packet size"));
+ gst_buffer_unmap (outbuf, bdata, -1);
gst_buffer_unref (outbuf);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static GstFlowReturn gst_rtp_mp4a_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpMP4APay, gst_rtp_mp4a_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
+#define gst_rtp_mp4a_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4APay, gst_rtp_mp4a_pay, GST_TYPE_BASE_RTP_PAYLOAD)
- static void gst_rtp_mp4a_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4a_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4a_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG4 audio payloader", "Codec/Payloader/Network/RTP",
- "Payload MPEG4 audio as RTP packets (RFC 3016)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mp4a_pay_class_init (GstRtpMP4APayClass * klass)
+ static void gst_rtp_mp4a_pay_class_init (GstRtpMP4APayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->finalize = gst_rtp_mp4a_pay_finalize;
gstbasertppayload_class->set_caps = gst_rtp_mp4a_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_mp4a_pay_handle_buffer;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4a_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4a_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload MPEG4 audio as RTP packets (RFC 3016)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpmp4apay_debug, "rtpmp4apay", 0,
"MP4A-LATM RTP Payloader");
}
static void
-gst_rtp_mp4a_pay_init (GstRtpMP4APay * rtpmp4apay, GstRtpMP4APayClass * klass)
+gst_rtp_mp4a_pay_init (GstRtpMP4APay * rtpmp4apay)
{
rtpmp4apay->rate = 90000;
rtpmp4apay->profile = g_strdup ("1");
GstBuffer * buffer)
{
guint8 *data;
- guint size;
+ gsize size;
guint8 objectType;
guint8 samplingIdx;
guint8 channelCfg;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size < 2)
goto too_short;
"objectType: %d, samplingIdx: %d (%d), channelCfg: %d", objectType,
samplingIdx, rtpmp4apay->rate, channelCfg);
+ gst_buffer_unmap (buffer, data, -1);
+
return TRUE;
/* ERROR */
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, FORMAT,
- (NULL), ("config string too short, expected 2 bytes, got %d", size));
+ (NULL), ("config string too short, expected 2 bytes, got %" G_GSIZE_FORMAT,
+ size));
+ gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
invalid_object:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, FORMAT,
(NULL), ("invalid object type 0"));
+ gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
wrong_freq:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported frequency index %d", samplingIdx));
+ gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
wrong_channels:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported number of channels %d, must < 8", channelCfg));
+ gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
}
GstBuffer *buffer, *cbuffer;
guint8 *config;
guint8 *data;
- guint size, i;
+ guint i;
+ gsize size;
buffer = gst_value_get_buffer (codec_data);
GST_LOG_OBJECT (rtpmp4apay, "configuring codec_data");
if (!res)
goto config_failed;
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
/* make the StreamMuxConfig, we need 15 bits for the header */
- config = g_malloc0 (size + 2);
+ cbuffer = gst_buffer_new_and_alloc (size + 2);
+ config = gst_buffer_map (cbuffer, NULL, NULL, GST_MAP_WRITE);
+ /* unlike the g_malloc0 this replaces, gst_buffer_new_and_alloc does not
+ * zero the memory, and the code below only |=-s bits into it */
+ memset (config, 0, size + 2);
/* Create StreamMuxConfig according to ISO/IEC 14496-3:
*
config[i + 2] |= ((data[i] & 0x7f) << 1);
}
- cbuffer = gst_buffer_new ();
- GST_BUFFER_DATA (cbuffer) = config;
- GST_BUFFER_MALLOCDATA (cbuffer) = config;
- GST_BUFFER_SIZE (cbuffer) = size + 2;
+ gst_buffer_unmap (cbuffer, config, -1);
+ gst_buffer_unmap (buffer, data, -1);
/* now we can configure the buffer */
if (rtpmp4apay->config)
GstRtpMP4APay *rtpmp4apay;
GstFlowReturn ret;
GstBuffer *outbuf;
- guint count, mtu, size;
- guint8 *data;
+ guint count, mtu;
+ gsize size;
+ guint8 *data, *bdata;
gboolean fragmented;
GstClockTime timestamp;
rtpmp4apay = GST_RTP_MP4A_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = bdata = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
fragmented = FALSE;
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp;
/* this will be the total lenght of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (size, 0, 0);
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
/* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
if (!fragmented) {
/* first packet write the header */
size -= payload_len;
/* marker only if the packet is complete */
- gst_rtp_buffer_set_marker (outbuf, size == 0);
+ gst_rtp_buffer_set_marker (&rtp, size == 0);
+
+ gst_rtp_buffer_unmap (&rtp);
/* copy incomming timestamp (if any) to outgoing buffers */
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
fragmented = TRUE;
}
+ gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return ret;
}
-GST_BOILERPLATE (GstRtpMP4GDepay, gst_rtp_mp4g_depay, GstBaseRTPDepayload,
+#define gst_rtp_mp4g_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4GDepay, gst_rtp_mp4g_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_mp4g_depay_finalize (GObject * object);
static void
-gst_rtp_mp4g_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4g_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4g_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG4 ES depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG4 elementary streams from RTP packets (RFC 3640)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_mp4g_depay_class_init (GstRtpMP4GDepayClass * klass)
{
GObjectClass *gobject_class;
gstbasertpdepayload_class->set_caps = gst_rtp_mp4g_depay_setcaps;
gstbasertpdepayload_class->handle_event = gst_rtp_mp4g_depay_handle_event;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4g_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4g_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 ES depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG4 elementary streams from RTP packets (RFC 3640)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpmp4gdepay_debug, "rtpmp4gdepay", 0,
"MP4-generic RTP Depayloader");
}
static void
-gst_rtp_mp4g_depay_init (GstRtpMP4GDepay * rtpmp4gdepay,
- GstRtpMP4GDepayClass * klass)
+gst_rtp_mp4g_depay_init (GstRtpMP4GDepay * rtpmp4gdepay)
{
rtpmp4gdepay->adapter = gst_adapter_new ();
rtpmp4gdepay->packets = g_queue_new ();
gst_rtp_mp4g_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstRtpMP4GDepay *rtpmp4gdepay;
- GstBuffer *outbuf;
+ GstBuffer *outbuf = NULL;
GstClockTime timestamp;
+ GstRTPBuffer rtp;
rtpmp4gdepay = GST_RTP_MP4G_DEPAY (depayload);
guint AU_size, AU_index, AU_index_delta, payload_AU_size;
gboolean M;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
GST_DEBUG_OBJECT (rtpmp4gdepay, "received payload of %d", payload_len);
- rtptime = gst_rtp_buffer_get_timestamp (buf);
- M = gst_rtp_buffer_get_marker (buf);
+ rtptime = gst_rtp_buffer_get_timestamp (&rtp);
+ M = gst_rtp_buffer_get_marker (&rtp);
if (rtpmp4gdepay->sizelength > 0) {
gint num_AU_headers, AU_headers_bytes, i;
/* collect stuff in the adapter, strip header from payload and push in
* the adapter */
outbuf =
- gst_rtp_buffer_get_payload_subbuffer (buf, payload_AU, AU_size);
+ gst_rtp_buffer_get_payload_subbuffer (&rtp, payload_AU, AU_size);
gst_adapter_push (rtpmp4gdepay->adapter, outbuf);
if (M) {
avail = gst_adapter_available (rtpmp4gdepay->adapter);
outbuf = gst_adapter_take_buffer (rtpmp4gdepay->adapter, avail);
- gst_buffer_set_caps (outbuf, GST_PAD_CAPS (depayload->srcpad));
/* copy some of the fields we calculated above on the buffer. We also
* copy the AU_index so that we can sort the packets in our queue. */
timestamp = -1;
GST_DEBUG_OBJECT (depayload, "pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ (guint) gst_buffer_get_size (outbuf));
gst_rtp_mp4g_depay_queue (rtpmp4gdepay, outbuf);
}
} else {
/* push complete buffer in adapter */
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 0, payload_len);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 0, payload_len);
gst_adapter_push (rtpmp4gdepay->adapter, outbuf);
/* if this was the last packet of the VOP, create and push a buffer */
outbuf = gst_adapter_take_buffer (rtpmp4gdepay->adapter, avail);
GST_DEBUG ("gst_rtp_mp4g_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
+ (guint) gst_buffer_get_size (outbuf));
+ gst_rtp_buffer_unmap (&rtp);
return outbuf;
}
}
}
+
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpmp4gdepay, STREAM, DECODE,
("Packet payload was too short."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
GstCaps * caps);
static GstFlowReturn gst_rtp_mp4g_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-static gboolean gst_rtp_mp4g_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_mp4g_pay_handle_event (GstBaseRTPPayload * payload,
+ GstEvent * event);
-GST_BOILERPLATE (GstRtpMP4GPay, gst_rtp_mp4g_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
+#define gst_rtp_mp4g_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4GPay, gst_rtp_mp4g_pay, GST_TYPE_BASE_RTP_PAYLOAD)
- static void gst_rtp_mp4g_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4g_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4g_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP MPEG4 ES payloader",
- "Codec/Payloader/Network/RTP",
- "Payload MPEG4 elementary streams as RTP packets (RFC 3640)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mp4g_pay_class_init (GstRtpMP4GPayClass * klass)
+ static void gst_rtp_mp4g_pay_class_init (GstRtpMP4GPayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gstbasertppayload_class->handle_buffer = gst_rtp_mp4g_pay_handle_buffer;
gstbasertppayload_class->handle_event = gst_rtp_mp4g_pay_handle_event;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4g_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4g_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 ES payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload MPEG4 elementary streams as RTP packets (RFC 3640)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpmp4gpay_debug, "rtpmp4gpay", 0,
"MP4-generic RTP Payloader");
}
static void
-gst_rtp_mp4g_pay_init (GstRtpMP4GPay * rtpmp4gpay, GstRtpMP4GPayClass * klass)
+gst_rtp_mp4g_pay_init (GstRtpMP4GPay * rtpmp4gpay)
{
rtpmp4gpay->adapter = gst_adapter_new ();
}
GstBuffer * buffer)
{
guint8 *data;
- guint size;
+ gsize size;
guint8 objectType = 0;
guint8 samplingIdx = 0;
guint8 channelCfg = 0;
GstBitReader br;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
gst_bit_reader_init (&br, data, size);
objectType, samplingIdx, rtpmp4gpay->rate, channelCfg,
rtpmp4gpay->frame_len);
+ gst_buffer_unmap (buffer, data, -1);
return TRUE;
/* ERROR */
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
(NULL), ("config string too short"));
+ gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
invalid_object:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
(NULL), ("invalid object type"));
+ gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
wrong_freq:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported frequency index %d", samplingIdx));
+ gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
wrong_channels:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported number of channels %d, must < 8", channelCfg));
+ gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
}
GstBuffer * buffer)
{
guint8 *data;
- guint size;
+ gsize size;
guint32 code;
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size < 5)
goto too_short;
GST_LOG_OBJECT (rtpmp4gpay, "profile %s", rtpmp4gpay->profile);
+ gst_buffer_unmap (buffer, data, -1);
+
return TRUE;
/* ERROR */
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
(NULL), ("config string too short"));
+ gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
}
guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp;
/* this will be the total lenght of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
/* create buffer to hold the payload, also make room for the 4 header bytes. */
outbuf = gst_rtp_buffer_new_allocate (payload_len + 4, 0, 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
/* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
* |AU-headers-length|AU-header|AU-header| |AU-header|padding|
gst_adapter_flush (rtpmp4gpay->adapter, payload_len);
/* marker only if the packet is complete */
- gst_rtp_buffer_set_marker (outbuf, avail <= payload_len);
+ gst_rtp_buffer_set_marker (&rtp, avail <= payload_len);
+
+ gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmp4gpay->first_timestamp;
GST_BUFFER_DURATION (outbuf) = rtpmp4gpay->first_duration;
}
static gboolean
-gst_rtp_mp4g_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_mp4g_pay_handle_event (GstBaseRTPPayload * payload, GstEvent * event)
{
GstRtpMP4GPay *rtpmp4gpay;
- rtpmp4gpay = GST_RTP_MP4G_PAY (gst_pad_get_parent (pad));
+ rtpmp4gpay = GST_RTP_MP4G_PAY (payload);
GST_DEBUG ("Got event: %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
case GST_EVENT_EOS:
/* This flush call makes sure that the last buffer is always pushed
* to the base payloader */
break;
}
- g_object_unref (rtpmp4gpay);
-
/* let parent handle event too */
return FALSE;
}
)
);
-GST_BOILERPLATE (GstRtpMP4VDepay, gst_rtp_mp4v_depay, GstBaseRTPDepayload,
+#define gst_rtp_mp4v_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4VDepay, gst_rtp_mp4v_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_mp4v_depay_finalize (GObject * object);
static GstStateChangeReturn gst_rtp_mp4v_depay_change_state (GstElement *
element, GstStateChange transition);
-
-static void
-gst_rtp_mp4v_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4v_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4v_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG4 video depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts MPEG4 video from RTP packets (RFC 3016)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
static void
gst_rtp_mp4v_depay_class_init (GstRtpMP4VDepayClass * klass)
{
gstbasertpdepayload_class->process = gst_rtp_mp4v_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_mp4v_depay_setcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4v_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4v_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 video depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG4 video from RTP packets (RFC 3016)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpmp4vdepay_debug, "rtpmp4vdepay", 0,
"MPEG4 video RTP Depayloader");
}
static void
-gst_rtp_mp4v_depay_init (GstRtpMP4VDepay * rtpmp4vdepay,
- GstRtpMP4VDepayClass * klass)
+gst_rtp_mp4v_depay_init (GstRtpMP4VDepay * rtpmp4vdepay)
{
rtpmp4vdepay->adapter = gst_adapter_new ();
}
gst_rtp_mp4v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstRtpMP4VDepay *rtpmp4vdepay;
- GstBuffer *outbuf;
+ GstBuffer *outbuf = NULL;
+ GstRTPBuffer rtp;
rtpmp4vdepay = GST_RTP_MP4V_DEPAY (depayload);
if (GST_BUFFER_IS_DISCONT (buf))
gst_adapter_clear (rtpmp4vdepay->adapter);
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
gst_adapter_push (rtpmp4vdepay->adapter, outbuf);
+ /* the adapter owns the pushed buffer now; only hand a buffer back when the
+ * marker below completes a VOP, otherwise return NULL as before */
+ outbuf = NULL;
/* if this was the last packet of the VOP, create and push a buffer */
- if (gst_rtp_buffer_get_marker (buf)) {
+ if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
avail = gst_adapter_available (rtpmp4vdepay->adapter);
outbuf = gst_adapter_take_buffer (rtpmp4vdepay->adapter, avail);
GST_DEBUG ("gst_rtp_mp4v_depay_chain: pushing buffer of size %d",
- GST_BUFFER_SIZE (outbuf));
-
- return outbuf;
+ (guint) gst_buffer_get_size (outbuf));
}
- return NULL;
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ return outbuf;
}
static GstStateChangeReturn
GstCaps * caps);
static GstFlowReturn gst_rtp_mp4v_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-static gboolean gst_rtp_mp4v_pay_handle_event (GstPad * pad, GstEvent * event);
+static gboolean gst_rtp_mp4v_pay_handle_event (GstBaseRTPPayload * pay,
+ GstEvent * event);
-GST_BOILERPLATE (GstRtpMP4VPay, gst_rtp_mp4v_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD)
+#define gst_rtp_mp4v_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4VPay, gst_rtp_mp4v_pay, GST_TYPE_BASE_RTP_PAYLOAD)
- static void gst_rtp_mp4v_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4v_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_mp4v_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP MPEG4 Video payloader", "Codec/Payloader/Network/RTP",
- "Payload MPEG-4 video as RTP packets (RFC 3016)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_mp4v_pay_class_init (GstRtpMP4VPayClass * klass)
+ static void gst_rtp_mp4v_pay_class_init (GstRtpMP4VPayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->set_property = gst_rtp_mp4v_pay_set_property;
gobject_class->get_property = gst_rtp_mp4v_pay_get_property;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4v_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_mp4v_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP MPEG4 Video payloader", "Codec/Payloader/Network/RTP",
+ "Payload MPEG-4 video as RTP packets (RFC 3016)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SEND_CONFIG,
g_param_spec_boolean ("send-config", "Send Config",
"Send the config parameters in RTP packets as well(deprecated "
}
static void
-gst_rtp_mp4v_pay_init (GstRtpMP4VPay * rtpmp4vpay, GstRtpMP4VPayClass * klass)
+gst_rtp_mp4v_pay_init (GstRtpMP4VPay * rtpmp4vpay)
{
rtpmp4vpay->adapter = gst_adapter_new ();
rtpmp4vpay->rate = 90000;
GST_LOG_OBJECT (rtpmp4vpay, "got codec_data");
if (G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
GstBuffer *buffer;
- guint8 *data;
- guint size;
buffer = gst_value_get_buffer (codec_data);
- data = GST_BUFFER_DATA (buffer);
- size = GST_BUFFER_SIZE (buffer);
-
- if (size < 5)
+ if (gst_buffer_get_size (buffer) < 5)
goto done;
- rtpmp4vpay->profile = data[4];
+ gst_buffer_extract (buffer, 4, &rtpmp4vpay->profile, 1);
GST_LOG_OBJECT (rtpmp4vpay, "configuring codec_data, profile %d",
- data[4]);
+ rtpmp4vpay->profile);
if (rtpmp4vpay->config)
gst_buffer_unref (rtpmp4vpay->config);
GstBuffer *outbuf_data = NULL;
GstFlowReturn ret;
GstBufferList *list = NULL;
- GstBufferListIterator *it = NULL;
/* the data available in the adapter is either smaller
* than the MTU or bigger. In the case it is smaller, the complete
* of buffers and the whole list will be pushed downstream
* at once */
list = gst_buffer_list_new ();
- it = gst_buffer_list_iterate (list);
}
while (avail > 0) {
guint towrite;
- guint8 *payload;
guint payload_len;
guint packet_len;
+ GstRTPBuffer rtp;
/* this will be the total lenght of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
/* this is the payload length */
payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
- if (rtpmp4vpay->buffer_list) {
- /* create buffer without payload. The payload will be put
- * in next buffer instead. Both buffers will be then added
- * to the list */
- outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
-
- /* Take buffer with the payload from the adapter */
- outbuf_data = gst_adapter_take_buffer (rtpmp4vpay->adapter, payload_len);
- } else {
- /* create buffer to hold the payload */
- outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
-
- /* copy payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ /* create buffer without payload. The payload will be put
+ * in next buffer instead. Both buffers will be merged */
+ outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
- gst_adapter_copy (rtpmp4vpay->adapter, payload, 0, payload_len);
- gst_adapter_flush (rtpmp4vpay->adapter, payload_len);
- }
+ /* Take buffer with the payload from the adapter */
+ outbuf_data = gst_adapter_take_buffer (rtpmp4vpay->adapter, payload_len);
avail -= payload_len;
- gst_rtp_buffer_set_marker (outbuf, avail == 0);
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_marker (&rtp, avail == 0);
+ gst_rtp_buffer_unmap (&rtp);
+
+ outbuf = gst_buffer_join (outbuf, outbuf_data);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmp4vpay->first_timestamp;
if (rtpmp4vpay->buffer_list) {
- /* create a new group to hold the rtp header and the payload */
- gst_buffer_list_iterator_add_group (it);
- gst_buffer_list_iterator_add (it, outbuf);
- gst_buffer_list_iterator_add (it, outbuf_data);
+ /* add to list */
+ gst_buffer_list_insert (list, -1, outbuf);
} else {
ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp4vpay), outbuf);
}
}
if (rtpmp4vpay->buffer_list) {
- gst_buffer_list_iterator_free (it);
/* push the whole buffer list at once */
ret =
gst_basertppayload_push_list (GST_BASE_RTP_PAYLOAD (rtpmp4vpay), list);
/* see if config changed */
equal = FALSE;
if (enc->config) {
- if (GST_BUFFER_SIZE (enc->config) == i) {
- equal = memcmp (GST_BUFFER_DATA (enc->config), data, i) == 0;
+ if (gst_buffer_get_size (enc->config) == i) {
+ equal = gst_buffer_memcmp (enc->config, 0, data, i) == 0;
}
}
/* if config string changed or new profile, make new caps */
if (!equal || newprofile) {
+ guint8 *bdata;
+
if (enc->config)
gst_buffer_unref (enc->config);
enc->config = gst_buffer_new_and_alloc (i);
- memcpy (GST_BUFFER_DATA (enc->config), data, i);
+ bdata = gst_buffer_map (enc->config, NULL, NULL, GST_MAP_WRITE);
+ memcpy (bdata, data, i);
+ gst_buffer_unmap (enc->config, bdata, -1);
gst_rtp_mp4v_pay_new_caps (enc);
}
*strip = i;
{
GstRtpMP4VPay *rtpmp4vpay;
GstFlowReturn ret;
- guint size, avail;
+ guint avail;
+ gsize size;
guint packet_len;
guint8 *data;
gboolean flush;
rtpmp4vpay = GST_RTP_MP4V_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
avail = gst_adapter_available (rtpmp4vpay->adapter);
/* depay incomming data and see if we need to start a new RTP
* packet */
flush = gst_rtp_mp4v_pay_depay_data (rtpmp4vpay, data, size, &strip, &vopi);
+ gst_buffer_unmap (buffer, data, -1);
+ data = NULL;
+
if (strip) {
/* strip off config if requested */
if (!(rtpmp4vpay->config_interval > 0)) {
GstBuffer *subbuf;
GST_LOG_OBJECT (rtpmp4vpay, "stripping config at %d, size %d", strip,
- size - strip);
+ (gint) size - strip);
/* strip off header */
- subbuf = gst_buffer_create_sub (buffer, strip, size - strip);
+ subbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY, strip,
+ size - strip);
GST_BUFFER_TIMESTAMP (subbuf) = timestamp;
gst_buffer_unref (buffer);
buffer = subbuf;
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
} else {
GST_LOG_OBJECT (rtpmp4vpay, "found config in stream");
rtpmp4vpay->last_config = timestamp;
gst_buffer_unref (buffer);
buffer = superbuf;
- size = GST_BUFFER_SIZE (buffer);
+ size = gst_buffer_get_size (buffer);
if (timestamp != -1) {
rtpmp4vpay->last_config = timestamp;
}
static gboolean
-gst_rtp_mp4v_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_mp4v_pay_handle_event (GstBaseRTPPayload * pay, GstEvent * event)
{
GstRtpMP4VPay *rtpmp4vpay;
- rtpmp4vpay = GST_RTP_MP4V_PAY (gst_pad_get_parent (pad));
+ rtpmp4vpay = GST_RTP_MP4V_PAY (pay);
GST_DEBUG ("Got event: %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_SEGMENT:
case GST_EVENT_EOS:
/* This flush call makes sure that the last buffer is always pushed
* to the base payloader */
break;
}
- g_object_unref (rtpmp4vpay);
-
/* let parent handle event too */
return FALSE;
}
static GstBuffer *gst_rtp_qcelp_depay_process (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
-GST_BOILERPLATE (GstRtpQCELPDepay, gst_rtp_qcelp_depay, GstBaseRTPDepayload,
+#define gst_rtp_qcelp_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpQCELPDepay, gst_rtp_qcelp_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_qcelp_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_qcelp_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_qcelp_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP QCELP depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts QCELP (PureVoice) audio from RTP packets (RFC 2658)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_qcelp_depay_class_init (GstRtpQCELPDepayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_qcelp_depay_finalize;
gstbasertpdepayload_class->process = gst_rtp_qcelp_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_qcelp_depay_setcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_qcelp_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_qcelp_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP QCELP depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts QCELP (PureVoice) audio from RTP packets (RFC 2658)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpqcelpdepay_debug, "rtpqcelpdepay", 0,
"QCELP RTP Depayloader");
}
static void
-gst_rtp_qcelp_depay_init (GstRtpQCELPDepay * rtpqcelpdepay,
- GstRtpQCELPDepayClass * klass)
+gst_rtp_qcelp_depay_init (GstRtpQCELPDepay * rtpqcelpdepay)
{
GstBaseRTPDepayload G_GNUC_UNUSED *depayload;
create_erasure_buffer (GstRtpQCELPDepay * depay)
{
GstBuffer *outbuf;
+ guint8 *data;
outbuf = gst_buffer_new_and_alloc (1);
- GST_BUFFER_DATA (outbuf)[0] = 14;
+ data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
+ data[0] = 14;
+ gst_buffer_unmap (outbuf, data, -1);
return outbuf;
}
guint payload_len, offset, index;
guint8 *payload;
guint LLL, NNN;
+ GstRTPBuffer rtp;
depay = GST_RTP_QCELP_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 2)
goto too_small;
timestamp = GST_BUFFER_TIMESTAMP (buf);
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* 0 1 2 3 4 5 6 7
* +-+-+-+-+-+-+-+-+
outbuf = create_erasure_buffer (depay);
} else {
/* each frame goes into its buffer */
- outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, offset, frame_len);
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, offset, frame_len);
}
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
flush_packets (depay);
}
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
/* ERRORS */
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP payload too small (%d)", payload_len));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_lll:
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP invalid LLL received (%d)", LLL));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_nnn:
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP invalid NNN received (%d)", NNN));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_frame:
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP invalid frame received"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
"encoding-name = (string)\"X-QDM\"")
);
-GST_BOILERPLATE (GstRtpQDM2Depay, gst_rtp_qdm2_depay, GstBaseRTPDepayload,
+#define gst_rtp_qdm2_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpQDM2Depay, gst_rtp_qdm2_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static const guint8 headheader[20] = {
GstCaps * caps);
static void
-gst_rtp_qdm2_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_qdm2_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_qdm2_depay_sink_template));
-
-
- gst_element_class_set_details_simple (element_class, "RTP QDM2 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts QDM2 audio from RTP packets (no RFC)",
- "Edward Hervey <bilboed@bilboed.com>");
-}
-
-static void
gst_rtp_qdm2_depay_class_init (GstRtpQDM2DepayClass * klass)
{
GObjectClass *gobject_class;
gobject_class->finalize = gst_rtp_qdm2_depay_finalize;
gstelement_class->change_state = gst_rtp_qdm2_depay_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_qdm2_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_qdm2_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP QDM2 depayloader",
+ "Codec/Depayloader/Network/RTP",
+ "Extracts QDM2 audio from RTP packets (no RFC)",
+ "Edward Hervey <bilboed@bilboed.com>");
}
static void
-gst_rtp_qdm2_depay_init (GstRtpQDM2Depay * rtpqdm2depay,
- GstRtpQDM2DepayClass * klass)
+gst_rtp_qdm2_depay_init (GstRtpQDM2Depay * rtpqdm2depay)
{
rtpqdm2depay->adapter = gst_adapter_new ();
}
GST_MEMDUMP ("Extracted packet", data, depay->packetsize);
buf = gst_buffer_new ();
- GST_BUFFER_DATA (buf) = data;
- GST_BUFFER_MALLOCDATA (buf) = data;
- GST_BUFFER_SIZE (buf) = depay->packetsize;
+ gst_buffer_take_memory (buf, -1,
+ gst_memory_new_wrapped (0, data, g_free, depay->packetsize, 0,
+ depay->packetsize));
gst_adapter_push (depay->adapter, buf);
gst_rtp_qdm2_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstRtpQDM2Depay *rtpqdm2depay;
- GstBuffer *outbuf;
+ GstBuffer *outbuf = NULL;
guint16 seq;
+ GstRTPBuffer rtp;
rtpqdm2depay = GST_RTP_QDM2_DEPAY (depayload);
guint avail;
guint pos = 0;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 3)
goto bad_packet;
- payload = gst_rtp_buffer_get_payload (buf);
- seq = gst_rtp_buffer_get_seq (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ seq = gst_rtp_buffer_get_seq (&rtp);
if (G_UNLIKELY (seq != rtpqdm2depay->nextseq)) {
GST_DEBUG ("GAP in sequence number, Resetting data !");
/* Flush previous data */
if (G_UNLIKELY (!rtpqdm2depay->configured)) {
guint8 *ourdata;
GstBuffer *codecdata;
+ guint8 *cdata;
GstCaps *caps;
/* First bytes are unknown */
/* Caps */
codecdata = gst_buffer_new_and_alloc (48);
- memcpy (GST_BUFFER_DATA (codecdata), headheader, 20);
- memcpy (GST_BUFFER_DATA (codecdata) + 20, ourdata, 28);
+ cdata = gst_buffer_map (codecdata, NULL, NULL, GST_MAP_WRITE);
+ memcpy (cdata, headheader, 20);
+ memcpy (cdata + 20, ourdata, 28);
+ gst_buffer_unmap (codecdata, cdata, -1);
caps = gst_caps_new_simple ("audio/x-qdm2",
"samplesize", G_TYPE_INT, 16,
GST_BUFFER_TIMESTAMP (outbuf) = rtpqdm2depay->ptimestamp;
GST_DEBUG ("Outgoing buffer timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (rtpqdm2depay->ptimestamp));
- return outbuf;
}
}
- return NULL;
+
+ gst_rtp_buffer_unmap (&rtp);
+ return outbuf;
/* ERRORS */
bad_packet:
{
GST_ELEMENT_WARNING (rtpqdm2depay, STREAM, DECODE,
(NULL), ("Packet was too short"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static gboolean gst_rtp_siren_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPSirenDepay, gst_rtp_siren_depay, GstBaseRTPDepayload,
+G_DEFINE_TYPE (GstRTPSirenDepay, gst_rtp_siren_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_siren_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_siren_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_siren_depay_sink_template));
- gst_element_class_set_details_simple (element_class,
- "RTP Siren packet depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts Siren audio from RTP packets",
- "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
-}
-
-static void
gst_rtp_siren_depay_class_init (GstRTPSirenDepayClass * klass)
{
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_siren_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_siren_depay_setcaps;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_siren_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_siren_depay_sink_template));
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Siren packet depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Siren audio from RTP packets",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
}
static void
-gst_rtp_siren_depay_init (GstRTPSirenDepay * rtpsirendepay,
- GstRTPSirenDepayClass * klass)
+gst_rtp_siren_depay_init (GstRTPSirenDepay * rtpsirendepay)
{
}
gst_rtp_siren_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf;
+ GstRTPBuffer rtp;
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
return outbuf;
}
static gboolean gst_rtp_siren_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
-GST_BOILERPLATE (GstRTPSirenPay, gst_rtp_siren_pay, GstBaseRTPAudioPayload,
+G_DEFINE_TYPE (GstRTPSirenPay, gst_rtp_siren_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
-gst_rtp_siren_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_siren_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_siren_pay_src_template));
- gst_element_class_set_details_simple (element_class,
- "RTP Payloader for Siren Audio", "Codec/Payloader/Network/RTP",
- "Packetize Siren audio streams into RTP packets",
- "Youness Alaoui <kakaroto@kakaroto.homelinux.net>");
-}
-
-static void
gst_rtp_siren_pay_class_init (GstRTPSirenPayClass * klass)
{
+ GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
+ gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_siren_pay_setcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_siren_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_siren_pay_src_template));
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Payloader for Siren Audio", "Codec/Payloader/Network/RTP",
+ "Packetize Siren audio streams into RTP packets",
+ "Youness Alaoui <kakaroto@kakaroto.homelinux.net>");
+
GST_DEBUG_CATEGORY_INIT (rtpsirenpay_debug, "rtpsirenpay", 0,
"siren audio RTP payloader");
}
static void
-gst_rtp_siren_pay_init (GstRTPSirenPay * rtpsirenpay,
- GstRTPSirenPayClass * klass)
+gst_rtp_siren_pay_init (GstRTPSirenPay * rtpsirenpay)
{
GstBaseRTPPayload *basertppayload;
GstBaseRTPAudioPayload *basertpaudiopayload;
static gboolean gst_rtp_speex_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
-GST_BOILERPLATE (GstRtpSPEEXDepay, gst_rtp_speex_depay, GstBaseRTPDepayload,
+G_DEFINE_TYPE (GstRtpSPEEXDepay, gst_rtp_speex_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
-gst_rtp_speex_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_speex_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_speex_depay_sink_template));
- gst_element_class_set_details_simple (element_class, "RTP Speex depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts Speex audio from RTP packets",
- "Edgard Lima <edgard.lima@indt.org.br>");
-}
-
-static void
gst_rtp_speex_depay_class_init (GstRtpSPEEXDepayClass * klass)
{
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_speex_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_speex_depay_setcaps;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_speex_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_speex_depay_sink_template));
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Speex depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Speex audio from RTP packets",
+ "Edgard Lima <edgard.lima@indt.org.br>");
}
static void
-gst_rtp_speex_depay_init (GstRtpSPEEXDepay * rtpspeexdepay,
- GstRtpSPEEXDepayClass * klass)
+gst_rtp_speex_depay_init (GstRtpSPEEXDepay * rtpspeexdepay)
{
}
GstRtpSPEEXDepay *rtpspeexdepay;
gint clock_rate, nb_channels;
GstBuffer *buf;
- guint8 *data;
+ guint8 *data, *bdata;
const gchar *params;
GstCaps *srccaps;
gboolean res;
/* construct minimal header and comment packet for the decoder */
buf = gst_buffer_new_and_alloc (80);
- data = GST_BUFFER_DATA (buf);
+ data = bdata = gst_buffer_map (buf, NULL, NULL, GST_MAP_WRITE);
memcpy (data, "Speex ", 8);
data += 8;
memcpy (data, "1.1.12", 7);
GST_WRITE_UINT32_LE (data, 0); /* reserved1 */
data += 4;
GST_WRITE_UINT32_LE (data, 0); /* reserved2 */
+ gst_buffer_unmap (buf, bdata, -1);
srccaps = gst_caps_new_simple ("audio/x-speex", NULL);
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpspeexdepay), buf);
buf = gst_buffer_new_and_alloc (sizeof (gst_rtp_speex_comment));
- memcpy (GST_BUFFER_DATA (buf), gst_rtp_speex_comment,
- sizeof (gst_rtp_speex_comment));
+ bdata = gst_buffer_map (buf, NULL, NULL, GST_MAP_WRITE);
+ memcpy (bdata, gst_rtp_speex_comment, sizeof (gst_rtp_speex_comment));
+ gst_buffer_unmap (buf, bdata, -1);
- gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpspeexdepay), buf);
return res;
gst_rtp_speex_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf = NULL;
+ GstRTPBuffer rtp;
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
- GST_BUFFER_SIZE (buf),
- gst_rtp_buffer_get_marker (buf),
- gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
+ gst_buffer_get_size (buf),
+ gst_rtp_buffer_get_marker (&rtp),
+ gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
/* nothing special to be done */
- outbuf = gst_rtp_buffer_get_payload_buffer (buf);
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
if (outbuf)
GST_BUFFER_DURATION (outbuf) = 20 * GST_MSECOND;
static gboolean gst_rtp_speex_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
static GstCaps *gst_rtp_speex_pay_getcaps (GstBaseRTPPayload * payload,
- GstPad * pad);
+ GstPad * pad, GstCaps * filter);
static GstFlowReturn gst_rtp_speex_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpSPEEXPay, gst_rtp_speex_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
-
-static void
-gst_rtp_speex_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_speex_pay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_speex_pay_src_template));
- gst_element_class_set_details_simple (element_class, "RTP Speex payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encodes Speex audio into a RTP packet",
- "Edgard Lima <edgard.lima@indt.org.br>");
-
- GST_DEBUG_CATEGORY_INIT (rtpspeexpay_debug, "rtpspeexpay", 0,
- "Speex RTP Payloader");
-}
+#define gst_rtp_speex_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSPEEXPay, gst_rtp_speex_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_speex_pay_class_init (GstRtpSPEEXPayClass * klass)
gstbasertppayload_class->set_caps = gst_rtp_speex_pay_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_speex_pay_getcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_speex_pay_handle_buffer;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_speex_pay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_speex_pay_src_template));
+ gst_element_class_set_details_simple (gstelement_class, "RTP Speex payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes Speex audio into a RTP packet",
+ "Edgard Lima <edgard.lima@indt.org.br>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpspeexpay_debug, "rtpspeexpay", 0,
+ "Speex RTP Payloader");
}
static void
-gst_rtp_speex_pay_init (GstRtpSPEEXPay * rtpspeexpay,
- GstRtpSPEEXPayClass * klass)
+gst_rtp_speex_pay_init (GstRtpSPEEXPay * rtpspeexpay)
{
GST_BASE_RTP_PAYLOAD (rtpspeexpay)->clock_rate = 8000;
GST_BASE_RTP_PAYLOAD_PT (rtpspeexpay) = 110; /* Create String */
static GstCaps *
-gst_rtp_speex_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
+gst_rtp_speex_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad,
+ GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
gst_caps_unref (otherpadcaps);
}
+ if (filter) {
+ GstCaps *tcaps = caps;
+
+ caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tcaps);
+ }
+
return caps;
}
GstBuffer * buffer)
{
GstRtpSPEEXPay *rtpspeexpay;
- guint size, payload_len;
+ guint payload_len;
+ gsize size;
GstBuffer *outbuf;
guint8 *payload, *data;
GstClockTime timestamp, duration;
GstFlowReturn ret;
+ GstRTPBuffer rtp;
rtpspeexpay = GST_RTP_SPEEX_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
switch (rtpspeexpay->packet) {
case 0:
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_DURATION (outbuf) = duration;
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* get payload */
- payload = gst_rtp_buffer_get_payload (outbuf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/* copy data in payload */
memcpy (&payload[0], data, size);
+ gst_rtp_buffer_unmap (&rtp);
+
ret = gst_basertppayload_push (basepayload, outbuf);
done:
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
rtpspeexpay->packet++;
{
GST_ELEMENT_ERROR (rtpspeexpay, STREAM, DECODE, (NULL),
("Error parsing first identification packet."));
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
"encoding-name = (string) { \"X-SV3V-ES\", \"X-SORENSON-VIDEO\" , \"X-SORENSONVIDEO\" , \"X-SorensonVideo\" }")
);
-GST_BOILERPLATE (GstRtpSV3VDepay, gst_rtp_sv3v_depay, GstBaseRTPDepayload,
+#define gst_rtp_sv3v_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSV3VDepay, gst_rtp_sv3v_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_sv3v_depay_finalize (GObject * object);
GstCaps * caps);
static void
-gst_rtp_sv3v_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_sv3v_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_sv3v_depay_sink_template));
-
-
- gst_element_class_set_details_simple (element_class, "RTP SVQ3 depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts SVQ3 video from RTP packets (no RFC)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_sv3v_depay_class_init (GstRtpSV3VDepayClass * klass)
{
GObjectClass *gobject_class;
gobject_class->finalize = gst_rtp_sv3v_depay_finalize;
gstelement_class->change_state = gst_rtp_sv3v_depay_change_state;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_sv3v_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_sv3v_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP SVQ3 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts SVQ3 video from RTP packets (no RFC)",
+ "Wim Taymans <wim.taymans@gmail.com>");
}
static void
-gst_rtp_sv3v_depay_init (GstRtpSV3VDepay * rtpsv3vdepay,
- GstRtpSV3VDepayClass * klass)
+gst_rtp_sv3v_depay_init (GstRtpSV3VDepay * rtpsv3vdepay)
{
rtpsv3vdepay->adapter = gst_adapter_new ();
}
gboolean C, S, E;
GstBuffer *outbuf = NULL;
guint16 seq;
+ GstRTPBuffer rtp;
rtpsv3vdepay = GST_RTP_SV3V_DEPAY (depayload);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
/* flush on sequence number gaps */
- seq = gst_rtp_buffer_get_seq (buf);
+ seq = gst_rtp_buffer_get_seq (&rtp);
GST_DEBUG ("timestamp %" GST_TIME_FORMAT ", sequence number:%d",
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), seq);
}
rtpsv3vdepay->nextseq = seq + 1;
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 3)
goto bad_packet;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
- M = gst_rtp_buffer_get_marker (buf);
+ M = gst_rtp_buffer_get_marker (&rtp);
/* This is all a guess:
* 1 1 1 1 1 1
GstCaps *caps;
GstBuffer *codec_data;
guint8 res;
+ guint8 *cdata;
GST_DEBUG ("Configuration packet");
/* if we already have caps, we don't need to do anything. FIXME, check if
* something changed. */
- if (G_UNLIKELY (GST_PAD_CAPS (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload)))) {
+ if (G_UNLIKELY (gst_pad_has_current_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD
+ (depayload)))) {
GST_DEBUG ("Already configured, skipping config parsing");
goto beach;
}
/* CodecData needs to be 'SEQH' + len (32bit) + data according to
* ffmpeg's libavcodec/svq3.c:svq3_decode_init */
codec_data = gst_buffer_new_and_alloc (payload_len + 6);
- memcpy (GST_BUFFER_DATA (codec_data), "SEQH", 4);
- GST_WRITE_UINT32_LE (GST_BUFFER_DATA (codec_data) + 4, payload_len - 2);
- memcpy (GST_BUFFER_DATA (codec_data) + 8, payload + 2, payload_len - 2);
+ cdata = gst_buffer_map (codec_data, NULL, NULL, GST_MAP_WRITE);
+ memcpy (cdata, "SEQH", 4);
+ GST_WRITE_UINT32_LE (cdata + 4, payload_len - 2);
+ memcpy (cdata + 8, payload + 2, payload_len - 2);
+
+ GST_MEMDUMP ("codec_data", cdata, gst_buffer_get_size (codec_data));
- GST_MEMDUMP ("codec_data", GST_BUFFER_DATA (codec_data),
- GST_BUFFER_SIZE (codec_data));
+ gst_buffer_unmap (codec_data, cdata, -1);
caps = gst_caps_new_simple ("video/x-svq",
"svqversion", G_TYPE_INT, 3,
GST_DEBUG ("Storing incoming payload");
/* store data in adapter, stip off 2 bytes header */
- tmpbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 2, -1);
+ tmpbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 2, -1);
gst_adapter_push (rtpsv3vdepay->adapter, tmpbuf);
if (G_UNLIKELY (M)) {
}
beach:
+ gst_rtp_buffer_unmap (&rtp);
return outbuf;
/* ERRORS */
{
GST_ELEMENT_WARNING (rtpsv3vdepay, STREAM, DECODE,
(NULL), ("Packet was too short"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
GST_STATIC_CAPS ("video/x-theora")
);
-GST_BOILERPLATE (GstRtpTheoraDepay, gst_rtp_theora_depay, GstBaseRTPDepayload,
+#define gst_rtp_theora_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpTheoraDepay, gst_rtp_theora_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_theora_depay_setcaps (GstBaseRTPDepayload * depayload,
static void gst_rtp_theora_depay_finalize (GObject * object);
-
-static void
-gst_rtp_theora_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_theora_depay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_theora_depay_src_template));
-
- gst_element_class_set_details_simple (element_class, "RTP Theora depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts Theora video from RTP packets (draft-01 of RFC XXXX)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
static void
gst_rtp_theora_depay_class_init (GstRtpTheoraDepayClass * klass)
{
GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_theora_depay_finalize;
gstbasertpdepayload_class->set_caps = gst_rtp_theora_depay_setcaps;
gstbasertpdepayload_class->packet_lost = gst_rtp_theora_depay_packet_lost;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_theora_depay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_theora_depay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Theora depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Theora video from RTP packets (draft-01 of RFC XXXX)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtptheoradepay_debug, "rtptheoradepay", 0,
"Theora RTP Depayloader");
}
static void
-gst_rtp_theora_depay_init (GstRtpTheoraDepay * rtptheoradepay,
- GstRtpTheoraDepayClass * klass)
+gst_rtp_theora_depay_init (GstRtpTheoraDepay * rtptheoradepay)
{
rtptheoradepay->adapter = gst_adapter_new ();
}
{
GstBuffer *buf;
guint32 num_headers;
- guint8 *data;
- guint size;
+ guint8 *data, *bdata;
+ gsize size;
gint i, j;
- data = GST_BUFFER_DATA (confbuf);
- size = GST_BUFFER_SIZE (confbuf);
+ data = bdata = gst_buffer_map (confbuf, &size, NULL, GST_MAP_READ);
- GST_DEBUG_OBJECT (rtptheoradepay, "config size %u", size);
+ GST_DEBUG_OBJECT (rtptheoradepay, "config size %" G_GSIZE_FORMAT, size);
/* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Number of packed headers |
for (j = 0; j <= n_headers; j++) {
guint h_size;
+ guint8 *odata;
h_size = h_sizes[j];
if (size < h_size) {
h_size);
buf = gst_buffer_new_and_alloc (h_size);
- memcpy (GST_BUFFER_DATA (buf), data, h_size);
+ odata = gst_buffer_map (buf, NULL, NULL, GST_MAP_WRITE);
+ memcpy (odata, data, h_size);
+ gst_buffer_unmap (buf, odata, -1);
conf->headers = g_list_append (conf->headers, buf);
data += h_size;
size -= h_size;
}
rtptheoradepay->configs = g_list_append (rtptheoradepay->configs, conf);
}
+
+ gst_buffer_unmap (confbuf, bdata, -1);
return TRUE;
/* ERRORS */
too_small:
{
GST_DEBUG_OBJECT (rtptheoradepay, "configuration too small");
+ gst_buffer_unmap (confbuf, bdata, -1);
return FALSE;
}
}
/* transform inline to out-of-band and parse that one */
confbuf = gst_buffer_new_and_alloc (size + 9);
- conf = GST_BUFFER_DATA (confbuf);
+ conf = gst_buffer_map (confbuf, NULL, NULL, GST_MAP_WRITE);
/* 1 header */
GST_WRITE_UINT32_BE (conf, 1);
/* write Ident */
GST_WRITE_UINT16_BE (conf + 7, length);
/* copy remainder */
memcpy (conf + 9, configuration, size);
+ gst_buffer_unmap (confbuf, conf, -1);
return gst_rtp_theora_depay_parse_configuration (rtptheoradepay, confbuf);
}
data = g_base64_decode (configuration, &size);
confbuf = gst_buffer_new ();
- GST_BUFFER_DATA (confbuf) = data;
- GST_BUFFER_MALLOCDATA (confbuf) = data;
- GST_BUFFER_SIZE (confbuf) = size;
+ gst_buffer_take_memory (confbuf, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
if (!gst_rtp_theora_depay_parse_configuration (rtptheoradepay, confbuf))
goto invalid_configuration;
guint32 timestamp;
guint32 header, ident;
guint8 F, TDT, packets;
+ GstRTPBuffer rtp;
rtptheoradepay = GST_RTP_THEORA_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
GST_DEBUG_OBJECT (depayload, "got RTP packet of size %d", payload_len);
if (G_UNLIKELY (payload_len < 4))
goto packet_short;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
header = GST_READ_UINT32_BE (payload);
/*
/* first assembled packet, reuse 2 bytes to store the length */
headerskip = (F == 1 ? 4 : 6);
/* skip header and length. */
- vdata = gst_rtp_buffer_get_payload_subbuffer (buf, headerskip, -1);
+ vdata = gst_rtp_buffer_get_payload_subbuffer (&rtp, headerskip, -1);
GST_DEBUG_OBJECT (depayload, "assemble theora packet");
gst_adapter_push (rtptheoradepay->adapter, vdata);
* .. theora data |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+*
*/
- timestamp = gst_rtp_buffer_get_timestamp (buf);
+ timestamp = gst_rtp_buffer_get_timestamp (&rtp);
while (payload_len >= 2) {
guint16 length;
/* create buffer for packet */
if (G_UNLIKELY (to_free)) {
outbuf = gst_buffer_new ();
- GST_BUFFER_DATA (outbuf) = payload;
- GST_BUFFER_MALLOCDATA (outbuf) = to_free;
- GST_BUFFER_SIZE (outbuf) = length;
+ gst_buffer_take_memory (outbuf, -1,
+ gst_memory_new_wrapped (0, to_free, g_free,
+ (payload - to_free) + length, payload - to_free, length));
to_free = NULL;
} else {
+ guint8 *odata;
+
outbuf = gst_buffer_new_and_alloc (length);
- memcpy (GST_BUFFER_DATA (outbuf), payload, length);
+ odata = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
+ memcpy (odata, payload, length);
+ gst_buffer_unmap (outbuf, odata, -1);
}
if (payload_len > 0 && (payload[0] & 0xC0) == 0x0)
if (rtptheoradepay->needs_keyframe)
goto request_keyframe;
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
no_output:
{
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
/* ERORRS */
ignore_reserved:
{
GST_WARNING_OBJECT (rtptheoradepay, "reserved TDT ignored");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
length_short:
gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
gst_structure_new ("GstForceKeyUnit",
"all-headers", G_TYPE_BOOLEAN, TRUE, NULL)));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
request_keyframe:
gst_pad_push_event (GST_BASE_RTP_DEPAYLOAD_SINKPAD (depayload),
gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
gst_structure_new ("GstForceKeyUnit", NULL)));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
GstRtpTheoraDepay *rtptheoradepay = GST_RTP_THEORA_DEPAY (depayload);
guint seqnum = 0;
- gst_structure_get_uint (event->structure, "seqnum", &seqnum);
+ gst_structure_get_uint (gst_event_get_structure (event), "seqnum", &seqnum);
GST_LOG_OBJECT (depayload, "Requested keyframe because frame with seqnum %u"
" is missing", seqnum);
rtptheoradepay->needs_keyframe = TRUE;
PROP_CONFIG_INTERVAL
};
-GST_BOILERPLATE (GstRtpTheoraPay, gst_rtp_theora_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
+#define gst_rtp_theora_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpTheoraPay, gst_rtp_theora_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static gboolean gst_rtp_theora_pay_setcaps (GstBaseRTPPayload * basepayload,
GstCaps * caps);
element, GstStateChange transition);
static GstFlowReturn gst_rtp_theora_pay_handle_buffer (GstBaseRTPPayload * pad,
GstBuffer * buffer);
-static gboolean gst_rtp_theora_pay_handle_event (GstPad * pad,
+static gboolean gst_rtp_theora_pay_handle_event (GstBaseRTPPayload * payload,
GstEvent * event);
GValue * value, GParamSpec * pspec);
static void
-gst_rtp_theora_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_theora_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_theora_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP Theora payloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encode Theora video into RTP packets (draft-01 RFC XXXX)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_theora_pay_class_init (GstRtpTheoraPayClass * klass)
{
GObjectClass *gobject_class;
gobject_class->set_property = gst_rtp_theora_pay_set_property;
gobject_class->get_property = gst_rtp_theora_pay_get_property;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_theora_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_theora_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Theora payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encode Theora video into RTP packets (draft-01 RFC XXXX)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtptheorapay_debug, "rtptheorapay", 0,
"Theora RTP Payloader");
}
static void
-gst_rtp_theora_pay_init (GstRtpTheoraPay * rtptheorapay,
- GstRtpTheoraPayClass * klass)
+gst_rtp_theora_pay_init (GstRtpTheoraPay * rtptheorapay)
{
rtptheorapay->last_config = GST_CLOCK_TIME_NONE;
}
gst_rtp_theora_pay_reset_packet (GstRtpTheoraPay * rtptheorapay, guint8 TDT)
{
guint payload_len;
+ GstRTPBuffer rtp;
GST_DEBUG_OBJECT (rtptheorapay, "reset packet");
rtptheorapay->payload_pos = 4;
- payload_len = gst_rtp_buffer_get_payload_len (rtptheorapay->packet);
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
rtptheorapay->payload_left = payload_len - 4;
rtptheorapay->payload_duration = 0;
rtptheorapay->payload_F = 0;
GstFlowReturn ret;
guint8 *payload;
guint hlen;
+ GstRTPBuffer rtp;
/* check for empty packet */
if (!rtptheorapay->packet || rtptheorapay->payload_pos <= 4)
GST_DEBUG_OBJECT (rtptheorapay, "flushing packet");
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
+
/* fix header */
- payload = gst_rtp_buffer_get_payload (rtptheorapay->packet);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/*
* 0 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
(rtptheorapay->payload_TDT & 0x3) << 4 |
(rtptheorapay->payload_pkts & 0xf);
+ gst_rtp_buffer_unmap (&rtp);
+
/* shrink the buffer size to the last written byte */
hlen = gst_rtp_buffer_calc_header_len (0);
- GST_BUFFER_SIZE (rtptheorapay->packet) = hlen + rtptheorapay->payload_pos;
+ gst_buffer_resize (rtptheorapay->packet, 0, hlen + rtptheorapay->payload_pos);
GST_BUFFER_DURATION (rtptheorapay->packet) = rtptheorapay->payload_duration;
extralen = 1;
for (walk = rtptheorapay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
-
guint bsize;
+ guint8 *data;
+ gsize size;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
length += bsize;
n_headers++;
} while (bsize);
}
/* update hash */
- ident = fnv1_hash_32_update (ident, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ ident = fnv1_hash_32_update (ident, data, size);
+ gst_buffer_unmap (buf, data, -1);
}
/* packet length is header size + packet length */
if (!g_list_next (walk))
break;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
/* calc size */
size = 0;
} while (bsize);
temp = size;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
/* write the size backwards */
flag = 0;
while (size) {
/* copy header data */
for (walk = rtptheorapay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+ guint8 *bdata;
+ gsize bsize;
- memcpy (data, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
- data += GST_BUFFER_SIZE (buf);
+ bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
+ memcpy (data, bdata, bsize);
+ gst_buffer_unmap (buf, bdata, -1);
+ data += bsize;
}
/* serialize to base64 */
guint plen;
guint8 *ppos, *payload;
gboolean fragmented;
+ GstRTPBuffer rtp;
/* size increases with packet length and 2 bytes size eader. */
newduration = rtptheorapay->payload_duration;
gst_rtp_theora_pay_init_packet (rtptheorapay, TDT, timestamp);
}
- payload = gst_rtp_buffer_get_payload (rtptheorapay->packet);
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtptheorapay->payload_pos;
fragmented = FALSE;
if (fragmented) {
/* fragmented packets are always flushed and have ptks of 0 */
rtptheorapay->payload_pkts = 0;
+ gst_rtp_buffer_unmap (&rtp);
ret = gst_rtp_theora_pay_flush_packet (rtptheorapay);
if (size > 0) {
/* start new packet and get pointers. TDT stays the same. */
gst_rtp_theora_pay_init_packet (rtptheorapay,
rtptheorapay->payload_TDT, timestamp);
- payload = gst_rtp_buffer_get_payload (rtptheorapay->packet);
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtptheorapay->payload_pos;
}
} else {
}
} while (size);
+ if (rtp.buffer)
+ gst_rtp_buffer_unmap (&rtp);
+
return ret;
}
{
GstRtpTheoraPay *rtptheorapay;
GstFlowReturn ret;
- guint size;
+ gsize size;
guint8 *data;
GstClockTime duration, timestamp;
guint8 TDT;
rtptheorapay = GST_RTP_THEORA_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
- GST_DEBUG_OBJECT (rtptheorapay, "size %u, duration %" GST_TIME_FORMAT,
- size, GST_TIME_ARGS (duration));
+ GST_DEBUG_OBJECT (rtptheorapay, "size %" G_GSIZE_FORMAT
+ ", duration %" GST_TIME_FORMAT, size, GST_TIME_ARGS (duration));
if (G_UNLIKELY (size > 0xffff))
goto wrong_size;
if (TDT != 0) {
GST_DEBUG_OBJECT (rtptheorapay, "collecting header, buffer %p", buffer);
/* append header to the list of headers */
+ gst_buffer_unmap (buffer, data, -1);
rtptheorapay->headers = g_list_append (rtptheorapay->headers, buffer);
ret = GST_FLOW_OK;
goto done;
ret = gst_rtp_theora_pay_payload_buffer (rtptheorapay, TDT, data, size,
timestamp, duration, 0);
+
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
done:
{
GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
("Invalid packet size (%d <= 0xffff)", size), (NULL));
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
parse_id_failed:
{
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
{
GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
(NULL), ("Ignoring unknown header received"));
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
{
GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
(NULL), ("Error initializing header config"));
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
}
static gboolean
-gst_rtp_theora_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_theora_pay_handle_event (GstBaseRTPPayload * payload, GstEvent * event)
{
- GstRtpTheoraPay *rtptheorapay = GST_RTP_THEORA_PAY (GST_PAD_PARENT (pad));
+ GstRtpTheoraPay *rtptheorapay = GST_RTP_THEORA_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
GST_STATIC_CAPS ("audio/x-vorbis")
);
-GST_BOILERPLATE (GstRtpVorbisDepay, gst_rtp_vorbis_depay, GstBaseRTPDepayload,
+#define gst_rtp_vorbis_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpVorbisDepay, gst_rtp_vorbis_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_vorbis_depay_setcaps (GstBaseRTPDepayload * depayload,
static GstStateChangeReturn gst_rtp_vorbis_depay_change_state (GstElement *
element, GstStateChange transition);
-
-static void
-gst_rtp_vorbis_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_vorbis_depay_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_vorbis_depay_src_template));
-
- gst_element_class_set_details_simple (element_class, "RTP Vorbis depayloader",
- "Codec/Depayloader/Network/RTP",
- "Extracts Vorbis Audio from RTP packets (RFC 5215)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
static void
gst_rtp_vorbis_depay_class_init (GstRtpVorbisDepayClass * klass)
{
gstbasertpdepayload_class->process = gst_rtp_vorbis_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_vorbis_depay_setcaps;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vorbis_depay_sink_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vorbis_depay_src_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Vorbis depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Vorbis Audio from RTP packets (RFC 5215)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpvorbisdepay_debug, "rtpvorbisdepay", 0,
"Vorbis RTP Depayloader");
}
static void
-gst_rtp_vorbis_depay_init (GstRtpVorbisDepay * rtpvorbisdepay,
- GstRtpVorbisDepayClass * klass)
+gst_rtp_vorbis_depay_init (GstRtpVorbisDepay * rtpvorbisdepay)
{
rtpvorbisdepay->adapter = gst_adapter_new ();
}
{
GstBuffer *buf;
guint32 num_headers;
- guint8 *data;
- guint size;
+ guint8 *data, *bdata;
+ gsize size;
guint offset;
gint i, j;
- data = GST_BUFFER_DATA (confbuf);
- size = GST_BUFFER_SIZE (confbuf);
+ bdata = data = gst_buffer_map (confbuf, &size, NULL, GST_MAP_READ);
- GST_DEBUG_OBJECT (rtpvorbisdepay, "config size %u", size);
+ GST_DEBUG_OBJECT (rtpvorbisdepay, "config size %" G_GSIZE_FORMAT, size);
/* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Number of packed headers |
GST_DEBUG_OBJECT (rtpvorbisdepay, "reading header %d, size %u", j,
h_size);
- buf = gst_buffer_create_sub (confbuf, offset, h_size);
+ buf = gst_buffer_copy_region (confbuf, GST_BUFFER_COPY_MEMORY, offset,
+ h_size);
conf->headers = g_list_append (conf->headers, buf);
offset += h_size;
size -= h_size;
}
rtpvorbisdepay->configs = g_list_append (rtpvorbisdepay->configs, conf);
}
+
+ gst_buffer_unmap (confbuf, bdata, -1);
gst_buffer_unref (confbuf);
return TRUE;
too_small:
{
GST_DEBUG_OBJECT (rtpvorbisdepay, "configuration too small");
+ gst_buffer_unmap (confbuf, bdata, -1);
gst_buffer_unref (confbuf);
return FALSE;
}
/* transform inline to out-of-band and parse that one */
confbuf = gst_buffer_new_and_alloc (size + 9);
- conf = GST_BUFFER_DATA (confbuf);
+ conf = gst_buffer_map (confbuf, NULL, NULL, GST_MAP_WRITE);
/* 1 header */
GST_WRITE_UINT32_BE (conf, 1);
/* write Ident */
GST_WRITE_UINT16_BE (conf + 7, length);
/* copy remainder */
memcpy (conf + 9, configuration, size);
+ gst_buffer_unmap (confbuf, conf, -1);
return gst_rtp_vorbis_depay_parse_configuration (rtpvorbisdepay, confbuf);
}
data = g_base64_decode (configuration, &size);
confbuf = gst_buffer_new ();
- GST_BUFFER_DATA (confbuf) = data;
- GST_BUFFER_MALLOCDATA (confbuf) = data;
- GST_BUFFER_SIZE (confbuf) = size;
+ gst_buffer_take_memory (confbuf, -1,
+ gst_memory_new_wrapped (0, data, g_free, size, 0, size));
if (!gst_rtp_vorbis_depay_parse_configuration (rtpvorbisdepay, confbuf))
goto invalid_configuration;
} else {
guint32 timestamp;
guint32 header, ident;
guint8 F, VDT, packets;
+ GstRTPBuffer rtp;
rtpvorbisdepay = GST_RTP_VORBIS_DEPAY (depayload);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
GST_DEBUG_OBJECT (depayload, "got RTP packet of size %d", payload_len);
if (G_UNLIKELY (payload_len < 4))
goto packet_short;
- payload = gst_rtp_buffer_get_payload (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
header = GST_READ_UINT32_BE (payload);
/*
/* first assembled packet, reuse 2 bytes to store the length */
headerskip = (F == 1 ? 4 : 6);
/* skip header and length. */
- vdata = gst_rtp_buffer_get_payload_subbuffer (buf, headerskip, -1);
+ vdata = gst_rtp_buffer_get_payload_subbuffer (&rtp, headerskip, -1);
GST_DEBUG_OBJECT (depayload, "assemble vorbis packet");
gst_adapter_push (rtpvorbisdepay->adapter, vdata);
* .. vorbis data |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+*
*/
- timestamp = gst_rtp_buffer_get_timestamp (buf);
+ timestamp = gst_rtp_buffer_get_timestamp (&rtp);
while (payload_len > 2) {
guint16 length;
/* create buffer for packet */
if (G_UNLIKELY (to_free)) {
outbuf = gst_buffer_new ();
- GST_BUFFER_DATA (outbuf) = payload;
- GST_BUFFER_MALLOCDATA (outbuf) = to_free;
- GST_BUFFER_SIZE (outbuf) = length;
+ gst_buffer_take_memory (outbuf, -1,
+ gst_memory_new_wrapped (0, to_free, g_free,
+ (payload - to_free) + length, payload - to_free, length));
to_free = NULL;
} else {
+ guint8 *data;
+
outbuf = gst_buffer_new_and_alloc (length);
- memcpy (GST_BUFFER_DATA (outbuf), payload, length);
+ data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
+ memcpy (data, payload, length);
+ gst_buffer_unmap (outbuf, data, -1);
}
payload += length;
g_free (to_free);
+ gst_rtp_buffer_unmap (&rtp);
+
return NULL;
no_output:
{
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
/* ERORRS */
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Could not switch codebooks"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
packet_short:
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Packet was too short (%d < 4)", payload_len));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
ignore_reserved:
{
GST_WARNING_OBJECT (rtpvorbisdepay, "reserved VDT ignored");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
length_short:
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Packet contains invalid data"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_configuration:
/* fatal, as we otherwise risk carrying on without output */
GST_ELEMENT_ERROR (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Packet contains invalid configuration"));
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
GST_STATIC_CAPS ("audio/x-vorbis")
);
-GST_BOILERPLATE (GstRtpVorbisPay, gst_rtp_vorbis_pay, GstBaseRTPPayload,
- GST_TYPE_BASE_RTP_PAYLOAD);
+#define gst_rtp_vorbis_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpVorbisPay, gst_rtp_vorbis_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static gboolean gst_rtp_vorbis_pay_setcaps (GstBaseRTPPayload * basepayload,
GstCaps * caps);
element, GstStateChange transition);
static GstFlowReturn gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * pad,
GstBuffer * buffer);
-static gboolean gst_rtp_vorbis_pay_handle_event (GstPad * pad,
+static gboolean gst_rtp_vorbis_pay_handle_event (GstBaseRTPPayload * payload,
GstEvent * event);
static void
-gst_rtp_vorbis_pay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_vorbis_pay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_vorbis_pay_sink_template));
-
- gst_element_class_set_details_simple (element_class, "RTP Vorbis depayloader",
- "Codec/Payloader/Network/RTP",
- "Payload-encode Vorbis audio into RTP packets (RFC 5215)",
- "Wim Taymans <wimi.taymans@gmail.com>");
-}
-
-static void
gst_rtp_vorbis_pay_class_init (GstRtpVorbisPayClass * klass)
{
GstElementClass *gstelement_class;
gstbasertppayload_class->handle_buffer = gst_rtp_vorbis_pay_handle_buffer;
gstbasertppayload_class->handle_event = gst_rtp_vorbis_pay_handle_event;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vorbis_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vorbis_pay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Vorbis payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encode Vorbis audio into RTP packets (RFC 5215)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpvorbispay_debug, "rtpvorbispay", 0,
"Vorbis RTP Payloader");
}
static void
-gst_rtp_vorbis_pay_init (GstRtpVorbisPay * rtpvorbispay,
- GstRtpVorbisPayClass * klass)
+gst_rtp_vorbis_pay_init (GstRtpVorbisPay * rtpvorbispay)
{
/* needed because of GST_BOILERPLATE */
}
gst_rtp_vorbis_pay_reset_packet (GstRtpVorbisPay * rtpvorbispay, guint8 VDT)
{
guint payload_len;
+ GstRTPBuffer rtp;
GST_LOG_OBJECT (rtpvorbispay, "reset packet");
rtpvorbispay->payload_pos = 4;
- payload_len = gst_rtp_buffer_get_payload_len (rtpvorbispay->packet);
+ gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
rtpvorbispay->payload_left = payload_len - 4;
rtpvorbispay->payload_duration = 0;
rtpvorbispay->payload_F = 0;
GstFlowReturn ret;
guint8 *payload;
guint hlen;
+ GstRTPBuffer rtp;
/* check for empty packet */
if (!rtpvorbispay->packet || rtpvorbispay->payload_pos <= 4)
GST_LOG_OBJECT (rtpvorbispay, "flushing packet");
+ gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
+
/* fix header */
- payload = gst_rtp_buffer_get_payload (rtpvorbispay->packet);
+ payload = gst_rtp_buffer_get_payload (&rtp);
/*
* 0 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
(rtpvorbispay->payload_VDT & 0x3) << 4 |
(rtpvorbispay->payload_pkts & 0xf);
+ gst_rtp_buffer_unmap (&rtp);
+
/* shrink the buffer size to the last written byte */
hlen = gst_rtp_buffer_calc_header_len (0);
- GST_BUFFER_SIZE (rtpvorbispay->packet) = hlen + rtpvorbispay->payload_pos;
+ gst_buffer_resize (rtpvorbispay->packet, 0, hlen + rtpvorbispay->payload_pos);
GST_BUFFER_DURATION (rtpvorbispay->packet) = rtpvorbispay->payload_duration;
for (walk = rtpvorbispay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
guint bsize;
+ guint8 *data;
+ gsize size;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
length += bsize;
n_headers++;
} while (bsize);
}
/* update hash */
- ident = fnv1_hash_32_update (ident, GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf));
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ ident = fnv1_hash_32_update (ident, data, size);
+ gst_buffer_unmap (buf, data, -1);
}
/* packet length is header size + packet length */
if (!g_list_next (walk))
break;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
/* calc size */
size = 0;
} while (bsize);
temp = size;
- bsize = GST_BUFFER_SIZE (buf);
+ bsize = gst_buffer_get_size (buf);
/* write the size backwards */
flag = 0;
while (size) {
for (walk = rtpvorbispay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
- memcpy (data, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
- data += GST_BUFFER_SIZE (buf);
+ gst_buffer_extract (buf, 0, data, gst_buffer_get_size (buf));
+ data += gst_buffer_get_size (buf);
}
/* serialize to base64 */
{
GstRtpVorbisPay *rtpvorbispay;
GstFlowReturn ret;
- guint size, newsize;
+ guint newsize;
+ gsize size;
guint8 *data;
guint packet_len;
GstClockTime duration, newduration, timestamp;
guint plen;
guint8 *ppos, *payload;
gboolean fragmented;
+ GstRTPBuffer rtp;
rtpvorbispay = GST_RTP_VORBIS_PAY (basepayload);
- size = GST_BUFFER_SIZE (buffer);
- data = GST_BUFFER_DATA (buffer);
+ data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
- GST_LOG_OBJECT (rtpvorbispay, "size %u, duration %" GST_TIME_FORMAT,
- size, GST_TIME_ARGS (duration));
+ GST_LOG_OBJECT (rtpvorbispay, "size %" G_GSIZE_FORMAT
+ ", duration %" GST_TIME_FORMAT, size, GST_TIME_ARGS (duration));
if (G_UNLIKELY (size < 1 || size > 0xffff))
goto wrong_size;
if (VDT != 0) {
GST_DEBUG_OBJECT (rtpvorbispay, "collecting header");
/* append header to the list of headers */
+ gst_buffer_unmap (buffer, data, -1);
rtpvorbispay->headers = g_list_append (rtpvorbispay->headers, buffer);
ret = GST_FLOW_OK;
goto done;
gst_rtp_vorbis_pay_init_packet (rtpvorbispay, VDT, timestamp);
}
- payload = gst_rtp_buffer_get_payload (rtpvorbispay->packet);
+ gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtpvorbispay->payload_pos;
fragmented = FALSE;
}
}
if (fragmented) {
+ gst_rtp_buffer_unmap (&rtp);
/* fragmented packets are always flushed and have ptks of 0 */
rtpvorbispay->payload_pkts = 0;
ret = gst_rtp_vorbis_pay_flush_packet (rtpvorbispay);
/* start new packet and get pointers. VDT stays the same. */
gst_rtp_vorbis_pay_init_packet (rtpvorbispay,
rtpvorbispay->payload_VDT, timestamp);
- payload = gst_rtp_buffer_get_payload (rtpvorbispay->packet);
+ gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtpvorbispay->payload_pos;
}
} else {
rtpvorbispay->payload_duration += duration;
}
}
+
+ if (rtp.buffer)
+ gst_rtp_buffer_unmap (&rtp);
+
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
done:
{
GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
("Invalid packet size (1 < %d <= 0xffff)", size), (NULL));
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
parse_id_failed:
{
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
{
GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
(NULL), ("Ignoring unknown header received"));
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
{
GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
(NULL), ("Error initializing header config"));
+ gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
}
static gboolean
-gst_rtp_vorbis_pay_handle_event (GstPad * pad, GstEvent * event)
+gst_rtp_vorbis_pay_handle_event (GstBaseRTPPayload * payload, GstEvent * event)
{
- GstRtpVorbisPay *rtpvorbispay = GST_RTP_VORBIS_PAY (GST_PAD_PARENT (pad));
+ GstRtpVorbisPay *rtpvorbispay = GST_RTP_VORBIS_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
"clock-rate = (int) 90000, " "encoding-name = (string) \"RAW\"")
);
-GST_BOILERPLATE (GstRtpVRawDepay, gst_rtp_vraw_depay, GstBaseRTPDepayload,
+#define gst_rtp_vraw_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_vraw_depay_setcaps (GstBaseRTPDepayload * depayload,
GstEvent * event);
static void
-gst_rtp_vraw_depay_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_vraw_depay_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_rtp_vraw_depay_sink_template));
-
- gst_element_class_set_details_simple (element_class,
- "RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
- "Extracts raw video from RTP packets (RFC 4175)",
- "Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
{
GstElementClass *gstelement_class;
gstbasertpdepayload_class->process = gst_rtp_vraw_depay_process;
gstbasertpdepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vraw_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_vraw_depay_sink_template));
+
+ gst_element_class_set_details_simple (gstelement_class,
+ "RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts raw video from RTP packets (RFC 4175)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
GST_DEBUG_CATEGORY_INIT (rtpvrawdepay_debug, "rtpvrawdepay", 0,
"raw video RTP Depayloader");
}
static void
-gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay,
- GstRtpVRawDepayClass * klass)
+gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay)
{
/* needed because of GST_BOILERPLATE */
}
rtpvrawdepay->outbuf = NULL;
}
rtpvrawdepay->timestamp = -1;
+ if (rtpvrawdepay->pool) {
+ gst_buffer_pool_set_active (rtpvrawdepay->pool, FALSE);
+ gst_object_unref (rtpvrawdepay->pool);
+ rtpvrawdepay->pool = NULL;
+ }
+}
+
+static GstFlowReturn
+gst_rtp_vraw_depay_negotiate_pool (GstRtpVRawDepay * depay, GstCaps * caps,
+ GstVideoInfo * info)
+{
+ GstQuery *query;
+ GstBufferPool *pool = NULL;
+ guint size, min, max, prefix, alignment;
+ GstStructure *config;
+
+ /* find a pool for the negotiated caps now */
+ query = gst_query_new_allocation (caps, TRUE);
+
+ if (gst_pad_peer_query (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depay), query)) {
+ GST_DEBUG_OBJECT (depay, "got downstream ALLOCATION hints");
+ /* we got configuration from our peer, parse them */
+ gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
+ &alignment, &pool);
+ } else {
+ GST_DEBUG_OBJECT (depay, "didn't get downstream ALLOCATION hints");
+ size = info->size;
+ min = max = 0;
+ prefix = 0;
+ alignment = 0;
+ }
+
+ if (pool == NULL) {
+ /* we did not get a pool, make one ourselves then */
+ pool = gst_buffer_pool_new ();
+ }
+
+ if (depay->pool)
+ gst_object_unref (depay->pool);
+ depay->pool = pool;
+
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set (config, caps, size, min, max, prefix, alignment);
+ /* just set the metadata, if the pool can support it we will transparently use
+ * it through the video info API. We could also see if the pool support this
+ * metadata and only activate it then. */
+ gst_buffer_pool_config_add_meta (config, GST_META_API_VIDEO);
+
+ gst_buffer_pool_set_config (pool, config);
+ /* and activate */
+ gst_buffer_pool_set_active (pool, TRUE);
+
+ gst_query_unref (query);
+
+ return GST_FLOW_OK;
}
static gboolean
GstStructure *structure;
GstRtpVRawDepay *rtpvrawdepay;
gint clock_rate;
- const gchar *str, *type;
+ const gchar *str;
gint format, width, height, pgroup, xinc, yinc;
- guint ystride, uvstride, yp, up, vp, outsize;
GstCaps *srccaps;
- guint32 fourcc = 0;
gboolean res;
+ GstFlowReturn ret;
rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
structure = gst_caps_get_structure (caps, 0);
- yp = up = vp = uvstride = 0;
xinc = yinc = 1;
if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
if (!strcmp (str, "RGB")) {
format = GST_VIDEO_FORMAT_RGB;
pgroup = 3;
- ystride = GST_ROUND_UP_4 (width * 3);
- outsize = ystride * height;
- type = "video/x-raw-rgb";
} else if (!strcmp (str, "RGBA")) {
format = GST_VIDEO_FORMAT_RGBA;
pgroup = 4;
- ystride = width * 4;
- outsize = ystride * height;
- type = "video/x-raw-rgb";
} else if (!strcmp (str, "BGR")) {
format = GST_VIDEO_FORMAT_BGR;
pgroup = 3;
- ystride = GST_ROUND_UP_4 (width * 3);
- outsize = ystride * height;
- type = "video/x-raw-rgb";
} else if (!strcmp (str, "BGRA")) {
format = GST_VIDEO_FORMAT_BGRA;
pgroup = 4;
- ystride = width * 4;
- outsize = ystride * height;
- type = "video/x-raw-rgb";
} else if (!strcmp (str, "YCbCr-4:4:4")) {
format = GST_VIDEO_FORMAT_AYUV;
pgroup = 3;
- ystride = width * 4;
- outsize = ystride * height;
- type = "video/x-raw-yuv";
- fourcc = GST_MAKE_FOURCC ('A', 'Y', 'U', 'V');
} else if (!strcmp (str, "YCbCr-4:2:2")) {
format = GST_VIDEO_FORMAT_UYVY;
pgroup = 4;
- ystride = GST_ROUND_UP_2 (width) * 2;
- outsize = ystride * height;
- type = "video/x-raw-yuv";
- fourcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
xinc = 2;
} else if (!strcmp (str, "YCbCr-4:2:0")) {
format = GST_VIDEO_FORMAT_I420;
pgroup = 6;
- ystride = GST_ROUND_UP_4 (width);
- uvstride = GST_ROUND_UP_8 (width) / 2;
- up = ystride * GST_ROUND_UP_2 (height);
- vp = up + uvstride * GST_ROUND_UP_2 (height) / 2;
- outsize = vp + uvstride * GST_ROUND_UP_2 (height) / 2;
- type = "video/x-raw-yuv";
- fourcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
xinc = yinc = 2;
} else if (!strcmp (str, "YCbCr-4:1:1")) {
format = GST_VIDEO_FORMAT_Y41B;
pgroup = 6;
- ystride = GST_ROUND_UP_4 (width);
- uvstride = GST_ROUND_UP_8 (width) / 4;
- up = ystride * height;
- vp = up + uvstride * height;
- outsize = vp + uvstride * height;
- type = "video/x-raw-yuv";
- fourcc = GST_MAKE_FOURCC ('Y', '4', '1', 'B');
xinc = 4;
} else
goto unknown_format;
- rtpvrawdepay->width = width;
- rtpvrawdepay->height = height;
- rtpvrawdepay->format = format;
- rtpvrawdepay->yp = yp;
- rtpvrawdepay->up = up;
- rtpvrawdepay->vp = vp;
+ gst_video_info_init (&rtpvrawdepay->vinfo);
+ gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
+ GST_VIDEO_INFO_FPS_N (&rtpvrawdepay->vinfo) = 0;
+ GST_VIDEO_INFO_FPS_D (&rtpvrawdepay->vinfo) = 1;
+
rtpvrawdepay->pgroup = pgroup;
rtpvrawdepay->xinc = xinc;
rtpvrawdepay->yinc = yinc;
- rtpvrawdepay->ystride = ystride;
- rtpvrawdepay->uvstride = uvstride;
- rtpvrawdepay->outsize = outsize;
-
- srccaps = gst_caps_new_simple (type,
- "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height,
- "format", GST_TYPE_FOURCC, fourcc,
- "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
+ srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
format);
- GST_DEBUG_OBJECT (depayload, "yp %d, up %d, vp %d", yp, up, vp);
- GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d", xinc, yinc);
- GST_DEBUG_OBJECT (depayload, "pgroup %d, ystride %d, uvstride %d", pgroup,
- ystride, uvstride);
- GST_DEBUG_OBJECT (depayload, "outsize %u", outsize);
+ GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d, pgroup %d",
+ xinc, yinc, pgroup);
+
+ /* negotiate a bufferpool */
+ if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
+ &rtpvrawdepay->vinfo)) != GST_FLOW_OK)
+ goto no_bufferpool;
return res;
GST_ERROR_OBJECT (depayload, "unknown sampling format '%s'", str);
return FALSE;
}
+no_bufferpool:
+ {
+ GST_DEBUG_OBJECT (depayload, "no bufferpool");
+ return FALSE;
+ }
}
static GstBuffer *
gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstRtpVRawDepay *rtpvrawdepay;
- guint8 *payload, *data, *yp, *up, *vp, *headers;
+ guint8 *payload, *yp, *up, *vp, *headers;
guint32 timestamp;
guint cont, ystride, uvstride, pgroup, payload_len;
gint width, height, xinc, yinc;
+ GstRTPBuffer rtp;
+ GstVideoFrame frame;
rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
- timestamp = gst_rtp_buffer_get_timestamp (buf);
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ timestamp = gst_rtp_buffer_get_timestamp (&rtp);
if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
GstBuffer *outbuf;
rtpvrawdepay->outbuf = NULL;
}
- ret = gst_pad_alloc_buffer (depayload->srcpad, -1, rtpvrawdepay->outsize,
- GST_PAD_CAPS (depayload->srcpad), &outbuf);
- if (ret != GST_FLOW_OK)
+ if (gst_pad_check_reconfigure (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload))) {
+ GstCaps *caps;
+
+ caps =
+ gst_pad_get_current_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload));
+ gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
+ &rtpvrawdepay->vinfo);
+ gst_caps_unref (caps);
+ }
+
+ ret = gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &outbuf, NULL);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
goto alloc_failed;
/* clear timestamp from alloc... */
rtpvrawdepay->timestamp = timestamp;
}
- data = GST_BUFFER_DATA (rtpvrawdepay->outbuf);
+ if (!gst_video_frame_map (&frame, &rtpvrawdepay->vinfo, rtpvrawdepay->outbuf,
+ GST_MAP_WRITE))
+ goto invalid_frame;
/* get pointer and strides of the planes */
- yp = data + rtpvrawdepay->yp;
- up = data + rtpvrawdepay->up;
- vp = data + rtpvrawdepay->vp;
+ yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
+ up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
+ vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
+
+ ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
+ uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
- ystride = rtpvrawdepay->ystride;
- uvstride = rtpvrawdepay->uvstride;
pgroup = rtpvrawdepay->pgroup;
- width = rtpvrawdepay->width;
- height = rtpvrawdepay->height;
+ width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
+ height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
xinc = rtpvrawdepay->xinc;
yinc = rtpvrawdepay->yinc;
- payload = gst_rtp_buffer_get_payload (buf);
- payload_len = gst_rtp_buffer_get_payload_len (buf);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 3)
goto short_packet;
"writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
line, offs, payload_len);
- switch (rtpvrawdepay->format) {
+ switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
case GST_VIDEO_FORMAT_RGB:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGR:
payload_len -= length;
}
- if (gst_rtp_buffer_get_marker (buf)) {
+ gst_video_frame_unmap (&frame);
+
+ /* read the marker bit before unmapping the RTP buffer */
+ cont = gst_rtp_buffer_get_marker (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+
+ if (cont) {
GST_LOG_OBJECT (depayload, "marker, flushing frame");
if (rtpvrawdepay->outbuf) {
gst_base_rtp_depayload_push_ts (depayload, timestamp,
{
GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
(NULL), ("unimplemented sampling"));
+ gst_video_frame_unmap (&frame);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
alloc_failed:
{
GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
+ gst_rtp_buffer_unmap (&rtp);
+ return NULL;
+ }
+invalid_frame:
+ {
+ GST_ERROR_OBJECT (depayload, "could not map video frame");
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
wrong_length:
{
GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
+ gst_video_frame_unmap (&frame);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
short_packet:
{
GST_WARNING_OBJECT (depayload, "short packet");
+ gst_video_frame_unmap (&frame);
+ gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
#include <gst/gst.h>
#include <gst/video/video.h>
+#include <gst/video/gstmetavideo.h>
#include <gst/rtp/gstbasertpdepayload.h>
G_BEGIN_DECLS
{
GstBaseRTPDepayload payload;
- gint width, height;
- GstVideoFormat format;
+ GstBufferPool *pool;
+ GstVideoInfo vinfo;
GstBuffer *outbuf;
guint32 timestamp;
gint pgroup;
gint xinc, yinc;
- guint yp, up, vp;
- gint ystride;
- gint uvstride;
};
struct _GstRtpVRawDepayClass
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("video/x-raw-rgb, "
- "bpp = (int) 24, "
- "depth = (int) 24, "
- "endianness = (int) BIG_ENDIAN, "
- "red_mask = (int) 0xFF000000, "
- "green_mask = (int) 0x00FF0000, "
- "blue_mask = (int) 0x0000FF00, "
- "width = (int) [ 1, 32767 ], "
- "height = (int) [ 1, 32767 ]; "
- "video/x-raw-rgb, "
- "bpp = (int) 32, "
- "depth = (int) 32, "
- "endianness = (int) BIG_ENDIAN, "
- "red_mask = (int) 0xFF000000, "
- "green_mask = (int) 0x00FF0000, "
- "blue_mask = (int) 0x0000FF00, "
- "alpha_mask = (int) 0x000000FF, "
- "width = (int) [ 1, 32767 ], "
- "height = (int) [ 1, 32767 ]; "
- "video/x-raw-rgb, "
- "bpp = (int) 24, "
- "depth = (int) 24, "
- "endianness = (int) BIG_ENDIAN, "
- "red_mask = (int) 0x0000FF00, "
- "green_mask = (int) 0x00FF0000, "
- "blue_mask = (int) 0xFF000000, "
- "width = (int) [ 1, 32767 ], "
- "height = (int) [ 1, 32767 ]; "
- "video/x-raw-rgb, "
- "bpp = (int) 32, "
- "depth = (int) 32, "
- "endianness = (int) BIG_ENDIAN, "
- "red_mask = (int) 0x0000FF00, "
- "green_mask = (int) 0x00FF0000, "
- "blue_mask = (int) 0xFF000000, "
- "alpha_mask = (int) 0x000000FF, "
- "width = (int) [ 1, 32767 ], "
- "height = (int) [ 1, 32767 ]; "
- "video/x-raw-yuv, "
- "format = (fourcc) { AYUV, UYVY, I420, Y41B, UYVP }, "
+    GST_STATIC_CAPS ("video/x-raw, "
+        "format = (string) { RGB, RGBA, BGR, BGRA, AYUV, UYVY, I420, Y41B, UYVP }, "
"width = (int) [ 1, 32767 ], " "height = (int) [ 1, 32767 ]; ")
);
static GstFlowReturn gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
-GST_BOILERPLATE (GstRtpVRawPay, gst_rtp_vraw_pay, GstBaseRTPPayload,
-    GST_TYPE_BASE_RTP_PAYLOAD)
+#define gst_rtp_vraw_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpVRawPay, gst_rtp_vraw_pay, GST_TYPE_BASE_RTP_PAYLOAD);
- static void gst_rtp_vraw_pay_base_init (gpointer klass)
+
+static void
+gst_rtp_vraw_pay_class_init (GstRtpVRawPayClass * klass)
{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+ GstElementClass *gstelement_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+
+ gstbasertppayload_class->set_caps = gst_rtp_vraw_pay_setcaps;
+ gstbasertppayload_class->handle_buffer = gst_rtp_vraw_pay_handle_buffer;
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_vraw_pay_src_template));
- gst_element_class_add_pad_template (element_class,
+ gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_vraw_pay_sink_template));
- gst_element_class_set_details_simple (element_class,
+ gst_element_class_set_details_simple (gstelement_class,
"RTP Raw Video payloader", "Codec/Payloader/Network/RTP",
"Payload raw video as RTP packets (RFC 4175)",
"Wim Taymans <wim.taymans@gmail.com>");
-}
-
-static void
-gst_rtp_vraw_pay_class_init (GstRtpVRawPayClass * klass)
-{
- GstBaseRTPPayloadClass *gstbasertppayload_class;
-
- gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
-
- gstbasertppayload_class->set_caps = gst_rtp_vraw_pay_setcaps;
- gstbasertppayload_class->handle_buffer = gst_rtp_vraw_pay_handle_buffer;
GST_DEBUG_CATEGORY_INIT (rtpvrawpay_debug, "rtpvrawpay", 0,
"Raw video RTP Payloader");
}
static void
-gst_rtp_vraw_pay_init (GstRtpVRawPay * rtpvrawpay, GstRtpVRawPayClass * klass)
+gst_rtp_vraw_pay_init (GstRtpVRawPay * rtpvrawpay)
{
}
gst_rtp_vraw_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
{
GstRtpVRawPay *rtpvrawpay;
- GstStructure *s;
gboolean res;
- const gchar *name;
- gint width, height;
- gint yp, up, vp;
- gint pgroup, ystride, uvstride = 0, xinc, yinc;
- GstVideoFormat sampling;
+ gint pgroup, xinc, yinc;
const gchar *depthstr, *samplingstr, *colorimetrystr;
gchar *wstr, *hstr;
- gboolean interlaced;
- const gchar *color_matrix;
gint depth;
+ GstVideoInfo info;
rtpvrawpay = GST_RTP_VRAW_PAY (payload);
- s = gst_caps_get_structure (caps, 0);
-
- /* start parsing the format */
- name = gst_structure_get_name (s);
-
- /* these values are the only thing we can do */
- depthstr = "8";
-
- /* parse common width/height */
- res = gst_structure_get_int (s, "width", &width);
- res &= gst_structure_get_int (s, "height", &height);
- if (!res)
- goto missing_dimension;
+ if (!gst_video_info_from_caps (&info, caps))
+ goto invalid_caps;
- if (!gst_structure_get_boolean (s, "interlaced", &interlaced))
- interlaced = FALSE;
+ rtpvrawpay->vinfo = info;
- color_matrix = gst_structure_get_string (s, "color-matrix");
colorimetrystr = "SMPTE240M";
- if (color_matrix) {
- if (g_str_equal (color_matrix, "sdtv")) {
+ if (info.color_matrix) {
+ if (g_str_equal (info.color_matrix, "sdtv")) {
/* BT.601 implies a bit more than just color-matrix */
colorimetrystr = "BT601-5";
- } else if (g_str_equal (color_matrix, "hdtv")) {
+ } else if (g_str_equal (info.color_matrix, "hdtv")) {
colorimetrystr = "BT709-2";
}
}
- yp = up = vp = 0;
xinc = yinc = 1;
- if (!strcmp (name, "video/x-raw-rgb")) {
- gint amask, rmask;
- gboolean has_alpha;
-
- has_alpha = gst_structure_get_int (s, "alpha_mask", &amask);
- depth = 8;
-
- if (!gst_structure_get_int (s, "red_mask", &rmask))
- goto unknown_mask;
+ /* these values are the only thing we can do */
+ depthstr = "8";
+ depth = 8;
- if (has_alpha) {
+ switch (GST_VIDEO_INFO_FORMAT (&info)) {
+ case GST_VIDEO_FORMAT_RGBA:
+ samplingstr = "RGBA";
pgroup = 4;
- ystride = width * 4;
- if (rmask == 0xFF000000) {
- sampling = GST_VIDEO_FORMAT_RGBA;
- samplingstr = "RGBA";
- } else {
- sampling = GST_VIDEO_FORMAT_BGRA;
- samplingstr = "BGRA";
- }
- } else {
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ samplingstr = "BGRA";
+ pgroup = 4;
+ break;
+ case GST_VIDEO_FORMAT_RGB:
+ samplingstr = "RGB";
pgroup = 3;
- ystride = GST_ROUND_UP_4 (width * 3);
- if (rmask == 0xFF000000) {
- sampling = GST_VIDEO_FORMAT_RGB;
- samplingstr = "RGB";
- } else {
- sampling = GST_VIDEO_FORMAT_BGR;
- samplingstr = "BGR";
- }
- }
- } else if (!strcmp (name, "video/x-raw-yuv")) {
- guint32 fourcc;
-
- if (!gst_structure_get_fourcc (s, "format", &fourcc))
- goto unknown_fourcc;
-
- GST_LOG_OBJECT (payload, "have fourcc %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (fourcc));
-
- switch (fourcc) {
- case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
- sampling = GST_VIDEO_FORMAT_AYUV;
- samplingstr = "YCbCr-4:4:4";
- pgroup = 3;
- ystride = width * 4;
- depth = 8;
- break;
- case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
- sampling = GST_VIDEO_FORMAT_UYVY;
- samplingstr = "YCbCr-4:2:2";
- pgroup = 4;
- xinc = 2;
- ystride = GST_ROUND_UP_2 (width) * 2;
- depth = 8;
- break;
- case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
- sampling = GST_VIDEO_FORMAT_Y41B;
- samplingstr = "YCbCr-4:1:1";
- pgroup = 6;
- xinc = 4;
- ystride = GST_ROUND_UP_4 (width);
- uvstride = GST_ROUND_UP_8 (width) / 4;
- up = ystride * height;
- vp = up + uvstride * height;
- depth = 8;
- break;
- case GST_MAKE_FOURCC ('I', '4', '2', '0'):
- sampling = GST_VIDEO_FORMAT_I420;
- samplingstr = "YCbCr-4:2:0";
- pgroup = 6;
- xinc = yinc = 2;
- ystride = GST_ROUND_UP_4 (width);
- uvstride = GST_ROUND_UP_8 (width) / 2;
- up = ystride * GST_ROUND_UP_2 (height);
- vp = up + uvstride * GST_ROUND_UP_2 (height) / 2;
- depth = 8;
- break;
- case GST_MAKE_FOURCC ('U', 'Y', 'V', 'P'):
-#define GST_VIDEO_FORMAT_UYVP GST_VIDEO_FORMAT_UYVY /* FIXME */
- sampling = GST_VIDEO_FORMAT_UYVP;
- samplingstr = "YCbCr-4:2:2";
- pgroup = 4;
- xinc = 2;
- ystride = GST_ROUND_UP_2 (width) * 2;
- depth = 10;
- break;
- default:
- goto unknown_fourcc;
- }
- } else
- goto unknown_format;
+ case GST_VIDEO_FORMAT_BGR:
+ samplingstr = "BGR";
+ pgroup = 3;
+ break;
+ case GST_VIDEO_FORMAT_AYUV:
+ samplingstr = "YCbCr-4:4:4";
+ pgroup = 3;
+ break;
+ case GST_VIDEO_FORMAT_UYVY:
+ samplingstr = "YCbCr-4:2:2";
+ pgroup = 4;
+ xinc = 2;
+ break;
+ case GST_VIDEO_FORMAT_Y41B:
+ samplingstr = "YCbCr-4:1:1";
+ pgroup = 6;
+ xinc = 4;
+ break;
+ case GST_VIDEO_FORMAT_I420:
+ samplingstr = "YCbCr-4:2:0";
+ pgroup = 6;
+ xinc = yinc = 2;
+ break;
+ case GST_VIDEO_FORMAT_UYVP:
+ samplingstr = "YCbCr-4:2:2";
+ pgroup = 4;
+ xinc = 2;
+ depth = 10;
+ depthstr = "10";
+ break;
+ default:
+ goto unknown_format;
+ break;
+ }
- if (interlaced) {
+ if (info.flags & GST_VIDEO_FLAG_INTERLACED) {
yinc *= 2;
}
- if (depth == 10) {
- depthstr = "10";
- }
- rtpvrawpay->width = width;
- rtpvrawpay->height = height;
- rtpvrawpay->sampling = sampling;
rtpvrawpay->pgroup = pgroup;
rtpvrawpay->xinc = xinc;
rtpvrawpay->yinc = yinc;
- rtpvrawpay->yp = yp;
- rtpvrawpay->up = up;
- rtpvrawpay->vp = vp;
- rtpvrawpay->ystride = ystride;
- rtpvrawpay->uvstride = uvstride;
- rtpvrawpay->interlaced = interlaced;
rtpvrawpay->depth = depth;
- GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %d", width, height,
- sampling);
- GST_DEBUG_OBJECT (payload, "yp %d, up %d, vp %d", yp, up, vp);
- GST_DEBUG_OBJECT (payload, "pgroup %d, ystride %d, uvstride %d", pgroup,
- ystride, uvstride);
+ GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %s",
+ GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info), samplingstr);
+ GST_DEBUG_OBJECT (payload, "xinc %d, yinc %d, pgroup %d", xinc, yinc, pgroup);
- wstr = g_strdup_printf ("%d", rtpvrawpay->width);
- hstr = g_strdup_printf ("%d", rtpvrawpay->height);
+ wstr = g_strdup_printf ("%d", GST_VIDEO_INFO_WIDTH (&info));
+ hstr = g_strdup_printf ("%d", GST_VIDEO_INFO_HEIGHT (&info));
gst_basertppayload_set_options (payload, "video", TRUE, "RAW", 90000);
- if (interlaced) {
+ if (info.flags & GST_VIDEO_FLAG_INTERLACED) {
res = gst_basertppayload_set_outcaps (payload, "sampling", G_TYPE_STRING,
samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
return res;
/* ERRORS */
-unknown_mask:
+invalid_caps:
{
- GST_ERROR_OBJECT (payload, "unknown red mask specified");
+ GST_ERROR_OBJECT (payload, "could not parse caps");
return FALSE;
}
unknown_format:
GST_ERROR_OBJECT (payload, "unknown caps format");
return FALSE;
}
-unknown_fourcc:
- {
- GST_ERROR_OBJECT (payload, "invalid or missing fourcc");
- return FALSE;
- }
-missing_dimension:
- {
- GST_ERROR_OBJECT (payload, "missing width or height property");
- return FALSE;
- }
}
static GstFlowReturn
GstRtpVRawPay *rtpvrawpay;
GstFlowReturn ret = GST_FLOW_OK;
guint line, offset;
- guint8 *data, *yp, *up, *vp;
+ guint8 *yp, *up, *vp;
guint ystride, uvstride;
- guint size, pgroup;
+ guint pgroup;
guint mtu;
guint width, height;
gint field;
+ GstVideoFrame frame;
+ gint interlaced;
+ GstRTPBuffer rtp;
rtpvrawpay = GST_RTP_VRAW_PAY (payload);
-  data = GST_BUFFER_DATA (buffer);
-  size = GST_BUFFER_SIZE (buffer);
+  /* Map the incoming frame; fail cleanly if the buffer does not match the
+   * negotiated video info (we own the buffer here, see the unref at the end
+   * of this function). */
+  if (!gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ)) {
+    GST_ERROR_OBJECT (rtpvrawpay, "could not map video frame");
+    gst_buffer_unref (buffer);
+    return GST_FLOW_ERROR;
+  }
- GST_LOG_OBJECT (rtpvrawpay, "new frame of %u bytes", size);
+ GST_LOG_OBJECT (rtpvrawpay, "new frame of %u bytes",
+ gst_buffer_get_size (buffer));
/* get pointer and strides of the planes */
- yp = data + rtpvrawpay->yp;
- up = data + rtpvrawpay->up;
- vp = data + rtpvrawpay->vp;
+ yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
+ up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
+ vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
- ystride = rtpvrawpay->ystride;
- uvstride = rtpvrawpay->uvstride;
+ ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
+ uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
mtu = GST_BASE_RTP_PAYLOAD_MTU (payload);
/* amount of bytes for one pixel */
pgroup = rtpvrawpay->pgroup;
- width = rtpvrawpay->width;
- height = rtpvrawpay->height;
+ width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
+ height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);
- /* start with line 0, offset 0 */
+ interlaced = !!(rtpvrawpay->vinfo.flags & GST_VIDEO_FLAG_INTERLACED);
- for (field = 0; field < 1 + rtpvrawpay->interlaced; field++) {
+ /* start with line 0, offset 0 */
+ for (field = 0; field < 1 + interlaced; field++) {
line = field;
offset = 0;
GST_BUFFER_DURATION (buffer) / 2;
}
- outdata = gst_rtp_buffer_get_payload (out);
+ gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
+ outdata = gst_rtp_buffer_get_payload (&rtp);
GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
mtu);
"writing length %u, line %u, offset %u, cont %d", length, lin, offs,
cont);
- switch (rtpvrawpay->sampling) {
+ switch (GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo)) {
case GST_VIDEO_FORMAT_RGB:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGR:
break;
}
default:
+ gst_rtp_buffer_unmap (&rtp);
gst_buffer_unref (out);
goto unknown_sampling;
}
if (line >= height) {
GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
- gst_rtp_buffer_set_marker (out, TRUE);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
}
+ gst_rtp_buffer_unmap (&rtp);
if (left > 0) {
GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
- GST_BUFFER_SIZE (out) -= left;
+ gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
}
/* push buffer */
}
}
+
+ gst_video_frame_unmap (&frame);
gst_buffer_unref (buffer);
return ret;
{
GST_ELEMENT_ERROR (payload, STREAM, FORMAT,
(NULL), ("unimplemented sampling"));
+ gst_video_frame_unmap (&frame);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_SUPPORTED;
}
{
GstBaseRTPPayload payload;
- gint width, height;
- GstVideoFormat sampling;
+ GstVideoInfo vinfo;
gint pgroup;
gint xinc, yinc;
-  guint yp, up, vp;
-  gint ystride;
-  gint uvstride;
-  gboolean interlaced;
gint depth;
};