 * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
24 #include <gst/rtp/gstrtpbuffer.h>
28 #include "gstrtpvrawdepay.h"
/* Element-private debug category; GST_CAT_DEFAULT makes it the implicit
 * target of the GST_DEBUG/GST_LOG/GST_WARNING macros used below. */
30 GST_DEBUG_CATEGORY_STATIC (rtpvrawdepay_debug);
31 #define GST_CAT_DEFAULT (rtpvrawdepay_debug)
/* Source pad: pushes the reassembled frames downstream as raw video.
 * The exact format/width/height are fixed later in setcaps from the
 * negotiated RTP caps.
 * NOTE(review): the pad direction/presence arguments of this template are
 * on lines missing from this extraction. */
33 static GstStaticPadTemplate gst_rtp_vraw_depay_src_template =
34 GST_STATIC_PAD_TEMPLATE ("src",
37 GST_STATIC_CAPS ("video/x-raw")
/* Sink pad: accepts RFC 4175 uncompressed-video RTP streams
 * (encoding-name "RAW", 90 kHz clock, dynamic payload type).
 * NOTE(review): the pad direction/presence arguments of this template are
 * on lines missing from this extraction. */
40 static GstStaticPadTemplate gst_rtp_vraw_depay_sink_template =
41 GST_STATIC_PAD_TEMPLATE ("sink",
44 GST_STATIC_CAPS ("application/x-rtp, "
45 "media = (string) \"video\", "
46 "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
47 "clock-rate = (int) 90000, " "encoding-name = (string) \"RAW\"")
/* Register the GstRtpVRawDepay GObject type, subclassing the base RTP
 * depayloader; parent_class is used by handle_event/change_state chaining. */
50 #define gst_rtp_vraw_depay_parent_class parent_class
51 G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay,
52 GST_TYPE_BASE_RTP_DEPAYLOAD);
/* Forward declarations for the vfuncs installed in class_init.
 * NOTE(review): the trailing parameter lines of these prototypes are
 * missing from this extraction. */
54 static gboolean gst_rtp_vraw_depay_setcaps (GstBaseRTPDepayload * depayload,
56 static GstBuffer *gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload,
59 static GstStateChangeReturn gst_rtp_vraw_depay_change_state (GstElement *
60 element, GstStateChange transition);
62 static gboolean gst_rtp_vraw_depay_handle_event (GstBaseRTPDepayload * filter,
/* Class initializer: wires up the state-change vfunc on the element class
 * and the set_caps/process/handle_event vfuncs on the base depayloader
 * class, registers both pad templates, sets element metadata, and creates
 * the debug category. */
66 gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
68 GstElementClass *gstelement_class;
69 GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
71 gstelement_class = (GstElementClass *) klass;
72 gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
/* change_state is overridden so the element can reset its state on
 * READY<->PAUSED transitions (see gst_rtp_vraw_depay_change_state). */
74 gstelement_class->change_state = gst_rtp_vraw_depay_change_state;
76 gstbasertpdepayload_class->set_caps = gst_rtp_vraw_depay_setcaps;
77 gstbasertpdepayload_class->process = gst_rtp_vraw_depay_process;
78 gstbasertpdepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
80 gst_element_class_add_pad_template (gstelement_class,
81 gst_static_pad_template_get (&gst_rtp_vraw_depay_src_template));
82 gst_element_class_add_pad_template (gstelement_class,
83 gst_static_pad_template_get (&gst_rtp_vraw_depay_sink_template));
85 gst_element_class_set_details_simple (gstelement_class,
86 "RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
87 "Extracts raw video from RTP packets (RFC 4175)",
88 "Wim Taymans <wim.taymans@gmail.com>");
90 GST_DEBUG_CATEGORY_INIT (rtpvrawdepay_debug, "rtpvrawdepay", 0,
91 "raw video RTP Depayloader");
/* Instance initializer: intentionally empty (kept only for the
 * boilerplate macros, per the original comment). */
95 gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay)
97 /* needed because of GST_BOILERPLATE */
/* Drop all per-stream state: unref any partially assembled output frame,
 * invalidate the remembered RTP timestamp, and deactivate/release the
 * negotiated buffer pool. Called on flush and on state changes. */
101 gst_rtp_vraw_depay_reset (GstRtpVRawDepay * rtpvrawdepay)
103 if (rtpvrawdepay->outbuf) {
104 gst_buffer_unref (rtpvrawdepay->outbuf);
105 rtpvrawdepay->outbuf = NULL;
/* -1 can never match a real 32-bit RTP timestamp stored in this field,
 * so the next packet always starts a new frame. */
107 rtpvrawdepay->timestamp = -1;
108 if (rtpvrawdepay->pool) {
/* Deactivate before unref so outstanding buffers are returned/freed. */
109 gst_buffer_pool_set_active (rtpvrawdepay->pool, FALSE);
110 gst_object_unref (rtpvrawdepay->pool);
111 rtpvrawdepay->pool = NULL;
/* Negotiate a GstBufferPool for output frames of @caps.
 * Sends an ALLOCATION query downstream; if the peer answers, its
 * size/min/max/prefix/alignment hints are used, otherwise a private pool
 * is created. The pool config requests the video-meta option so strided
 * layouts can be used transparently via the video-frame API. The old
 * depay->pool (if any) is replaced and the new pool activated. */
116 gst_rtp_vraw_depay_negotiate_pool (GstRtpVRawDepay * depay, GstCaps * caps,
120 GstBufferPool *pool = NULL;
121 guint size, min, max, prefix, alignment;
122 GstStructure *config;
124 /* find a pool for the negotiated caps now */
125 query = gst_query_new_allocation (caps, TRUE);
127 if (gst_pad_peer_query (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depay), query)) {
128 GST_DEBUG_OBJECT (depay, "got downstream ALLOCATION hints");
129 /* we got configuration from our peer, parse them */
130 gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
/* No answer from downstream: fall back to defaults (the exact fallback
 * assignments are on lines missing from this extraction). */
133 GST_DEBUG_OBJECT (depay, "didn't get downstream ALLOCATION hints");
141 /* we did not get a pool, make one ourselves then */
142 pool = gst_buffer_pool_new ();
/* Release any previously negotiated pool before storing the new one. */
146 gst_object_unref (depay->pool);
149 config = gst_buffer_pool_get_config (pool);
150 gst_buffer_pool_config_set (config, caps, size, min, max, prefix, alignment);
151 /* just set the metadata, if the pool can support it we will transparently use
152 * it through the video info API. We could also see if the pool support this
153 * metadata and only activate it then. */
154 gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_META_VIDEO);
156 gst_buffer_pool_set_config (pool, config);
/* Activate so acquire_buffer() in process() can hand out buffers. */
158 gst_buffer_pool_set_active (pool, TRUE);
160 gst_query_unref (query);
/* set_caps vfunc: parse the RFC 4175 SDP-derived fields from the RTP sink
 * caps (width, height, sampling, optional interlace), map the "sampling"
 * string onto a GstVideoFormat plus the per-format pgroup/xinc/yinc
 * packing parameters, build a GstVideoInfo, push the resulting raw-video
 * caps on the src pad and negotiate a buffer pool for them.
 * Returns FALSE via the error labels below when a mandatory field is
 * missing or unsupported. */
166 gst_rtp_vraw_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
168 GstStructure *structure;
169 GstRtpVRawDepay *rtpvrawdepay;
172 gint format, width, height, pgroup, xinc, yinc;
177 rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
179 structure = gst_caps_get_structure (caps, 0);
/* clock-rate is optional in caps; RFC 4175 mandates 90000 so use that. */
183 if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
184 clock_rate = 90000; /* default */
185 depayload->clock_rate = clock_rate;
/* width/height arrive as strings (they come from SDP fmtp parameters). */
187 if (!(str = gst_structure_get_string (structure, "width")))
191 if (!(str = gst_structure_get_string (structure, "height")))
195 /* optional interlace value but we don't handle interlaced
197 if (gst_structure_get_string (structure, "interlace"))
200 if (!(str = gst_structure_get_string (structure, "sampling")))
/* Map the RFC 4175 sampling name to a GstVideoFormat. The pgroup/xinc/
 * yinc assignments for each branch are on lines missing from this
 * extraction. */
203 if (!strcmp (str, "RGB")) {
204 format = GST_VIDEO_FORMAT_RGB;
206 } else if (!strcmp (str, "RGBA")) {
207 format = GST_VIDEO_FORMAT_RGBA;
209 } else if (!strcmp (str, "BGR")) {
210 format = GST_VIDEO_FORMAT_BGR;
212 } else if (!strcmp (str, "BGRA")) {
213 format = GST_VIDEO_FORMAT_BGRA;
215 } else if (!strcmp (str, "YCbCr-4:4:4")) {
216 format = GST_VIDEO_FORMAT_AYUV;
218 } else if (!strcmp (str, "YCbCr-4:2:2")) {
219 format = GST_VIDEO_FORMAT_UYVY;
222 } else if (!strcmp (str, "YCbCr-4:2:0")) {
223 format = GST_VIDEO_FORMAT_I420;
226 } else if (!strcmp (str, "YCbCr-4:1:1")) {
227 format = GST_VIDEO_FORMAT_Y41B;
/* Framerate is unknown from RTP caps: 0/1 marks it as variable/unknown. */
233 gst_video_info_init (&rtpvrawdepay->vinfo);
234 gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
235 GST_VIDEO_INFO_FPS_N (&rtpvrawdepay->vinfo) = 0;
236 GST_VIDEO_INFO_FPS_D (&rtpvrawdepay->vinfo) = 1;
238 rtpvrawdepay->pgroup = pgroup;
239 rtpvrawdepay->xinc = xinc;
240 rtpvrawdepay->yinc = yinc;
242 srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
243 res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
244 gst_caps_unref (srccaps);
246 GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
248 GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d, pgroup %d",
251 /* negotiate a bufferpool */
252 if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
253 &rtpvrawdepay->vinfo)) != GST_FLOW_OK)
/* Error handlers; their goto labels are on lines missing from this
 * extraction. Each logs and (presumably) returns FALSE. */
261 GST_ERROR_OBJECT (depayload, "no width specified");
266 GST_ERROR_OBJECT (depayload, "no height specified");
271 GST_ERROR_OBJECT (depayload, "interlaced formats not supported yet");
276 GST_ERROR_OBJECT (depayload, "no sampling specified");
281 GST_ERROR_OBJECT (depayload, "unknown sampling format '%s'", str);
286 GST_DEBUG_OBJECT (depayload, "no bufferpool");
/* process vfunc: depayload one RTP packet (RFC 4175).
 *
 * Each packet carries a sequence of line headers (length, line number,
 * offset, continuation bit) followed by the pixel data for those partial
 * scanlines. Packets sharing an RTP timestamp belong to the same frame:
 * a timestamp change flushes the previous frame downstream and acquires
 * a fresh output buffer from the negotiated pool. Pixel data is copied
 * into the mapped video frame according to the negotiated format's
 * packing (pgroup bytes per xinc pixels, yinc lines per pgroup). */
292 gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
294 GstRtpVRawDepay *rtpvrawdepay;
295 guint8 *payload, *yp, *up, *vp, *headers;
297 guint cont, ystride, uvstride, pgroup, payload_len;
298 gint width, height, xinc, yinc;
302 rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
304 gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
306 timestamp = gst_rtp_buffer_get_timestamp (&rtp);
/* New timestamp (or no pending buffer) => start a new output frame. */
308 if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
312 GST_LOG_OBJECT (depayload, "new frame with timestamp %u", timestamp);
313 /* new timestamp, flush old buffer and create new output buffer */
314 if (rtpvrawdepay->outbuf) {
315 gst_base_rtp_depayload_push_ts (depayload, rtpvrawdepay->timestamp,
316 rtpvrawdepay->outbuf);
317 rtpvrawdepay->outbuf = NULL;
/* Downstream asked for renegotiation: redo the pool with current caps. */
320 if (gst_pad_check_reconfigure (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload))) {
324 gst_pad_get_current_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload));
325 gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
326 &rtpvrawdepay->vinfo);
327 gst_caps_unref (caps);
330 ret = gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &outbuf, NULL);
331 if (G_UNLIKELY (ret != GST_FLOW_OK))
334 /* clear timestamp from alloc... */
335 GST_BUFFER_TIMESTAMP (outbuf) = -1;
337 rtpvrawdepay->outbuf = outbuf;
338 rtpvrawdepay->timestamp = timestamp;
/* Map the pending frame for writing; failure jumps to invalid_frame. */
341 if (!gst_video_frame_map (&frame, &rtpvrawdepay->vinfo, rtpvrawdepay->outbuf,
345 /* get pointer and strides of the planes */
346 yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
347 up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
348 vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
350 ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
351 uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
353 pgroup = rtpvrawdepay->pgroup;
354 width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
355 height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
356 xinc = rtpvrawdepay->xinc;
357 yinc = rtpvrawdepay->yinc;
359 payload = gst_rtp_buffer_get_payload (&rtp);
360 payload_len = gst_rtp_buffer_get_payload_len (&rtp);
365 /* skip extended seqnum */
369 /* remember header position */
372 /* find data start */
/* Continuation bit (MSB of the offset field) of the first header. */
377 cont = payload[4] & 0x80;
/* Per-line copy loop: one iteration per line header in the packet. */
384 guint length, line, offs, plen;
387 /* stop when we run out of data */
388 if (payload_len == 0)
391 /* read length and cont. This should work because we iterated the headers
/* Header layout per RFC 4175: 16-bit length, 15-bit line number,
 * 15-bit offset, 1-bit continuation (MSB of the offset bytes). */
393 length = (headers[0] << 8) | headers[1];
394 line = ((headers[2] & 0x7f) << 8) | headers[3];
395 offs = ((headers[4] & 0x7f) << 8) | headers[5];
396 cont = headers[4] & 0x80;
399 /* length must be a multiple of pgroup */
400 if (length % pgroup != 0)
/* Clamp to what is actually present in the payload. */
403 if (length > payload_len)
404 length = payload_len;
/* Discard lines/offsets outside the negotiated frame dimensions. */
407 if (line > (height - yinc)) {
408 GST_WARNING_OBJECT (depayload, "skipping line %d: out of range", line);
411 if (offs > (width - xinc)) {
412 GST_WARNING_OBJECT (depayload, "skipping offset %d: out of range", offs);
416 /* calculate the maximim amount of bytes we can use per line */
417 if (offs + ((length / pgroup) * xinc) > width) {
418 plen = ((width - offs) * pgroup) / xinc;
419 GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",
424 GST_LOG_OBJECT (depayload,
425 "writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
426 line, offs, payload_len);
428 switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
429 case GST_VIDEO_FORMAT_RGB:
430 case GST_VIDEO_FORMAT_RGBA:
431 case GST_VIDEO_FORMAT_BGR:
432 case GST_VIDEO_FORMAT_BGRA:
433 case GST_VIDEO_FORMAT_UYVY:
434 /* samples are packed just like gstreamer packs them */
436 datap = yp + (line * ystride) + (offs * pgroup);
438 memcpy (datap, payload, plen);
440 case GST_VIDEO_FORMAT_AYUV:
/* 4 bytes per pixel in the destination AYUV plane. */
445 datap = yp + (line * ystride) + (offs * 4);
448 /* samples are packed in order Cb-Y-Cr for both interlaced and
449 * progressive frames */
450 for (i = 0; i < plen; i += pgroup) {
459 case GST_VIDEO_FORMAT_I420:
463 guint8 *yd1p, *yd2p, *udp, *vdp, *p;
/* 4:2:0 carries two Y lines per pgroup; yd2p is the second line. */
465 yd1p = yp + (line * ystride) + (offs);
466 yd2p = yd1p + ystride;
467 uvoff = (line / yinc * uvstride) + (offs / xinc);
473 /* line 0/1: Y00-Y01-Y10-Y11-Cb00-Cr00 Y02-Y03-Y12-Y13-Cb01-Cr01 ... */
474 for (i = 0; i < plen; i += pgroup) {
485 case GST_VIDEO_FORMAT_Y41B:
489 guint8 *ydp, *udp, *vdp, *p;
491 ydp = yp + (line * ystride) + (offs);
492 uvoff = (line / yinc * uvstride) + (offs / xinc);
498 /* Samples are packed in order Cb0-Y0-Y1-Cr0-Y2-Y3 for both interlaced
499 * and progressive scan lines */
500 for (i = 0; i < plen; i += pgroup) {
512 goto unknown_sampling;
/* Consume this chunk and loop to the next line header. */
520 payload_len -= length;
523 gst_video_frame_unmap (&frame);
524 gst_rtp_buffer_unmap (&rtp);
/* NOTE(review): gst_rtp_buffer_get_marker() is called on &rtp AFTER
 * gst_rtp_buffer_unmap(&rtp) above — this looks like a use-after-unmap;
 * the marker bit should be read while the buffer is still mapped
 * (or cached earlier). Confirm against the upstream fix. */
526 if (gst_rtp_buffer_get_marker (&rtp)) {
527 GST_LOG_OBJECT (depayload, "marker, flushing frame");
528 if (rtpvrawdepay->outbuf) {
529 gst_base_rtp_depayload_push_ts (depayload, timestamp,
530 rtpvrawdepay->outbuf);
531 rtpvrawdepay->outbuf = NULL;
533 rtpvrawdepay->timestamp = -1;
/* Error handlers; their goto labels are on lines missing from this
 * extraction. Each unmaps whatever was mapped before bailing out. */
540 GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
541 (NULL), ("unimplemented sampling"));
542 gst_video_frame_unmap (&frame);
543 gst_rtp_buffer_unmap (&rtp);
548 GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
549 gst_rtp_buffer_unmap (&rtp);
554 GST_ERROR_OBJECT (depayload, "could not map video frame");
555 gst_rtp_buffer_unmap (&rtp);
560 GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
561 gst_video_frame_unmap (&frame);
562 gst_rtp_buffer_unmap (&rtp);
567 GST_WARNING_OBJECT (depayload, "short packet");
568 gst_video_frame_unmap (&frame);
569 gst_rtp_buffer_unmap (&rtp);
/* handle_event vfunc: reset all per-stream state on FLUSH_STOP, then
 * chain every event up to the parent class handler. */
575 gst_rtp_vraw_depay_handle_event (GstBaseRTPDepayload * filter, GstEvent * event)
578 GstRtpVRawDepay *rtpvrawdepay;
580 rtpvrawdepay = GST_RTP_VRAW_DEPAY (filter);
582 switch (GST_EVENT_TYPE (event)) {
583 case GST_EVENT_FLUSH_STOP:
584 gst_rtp_vraw_depay_reset (rtpvrawdepay);
591 GST_BASE_RTP_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
/* change_state vfunc: reset stream state when entering PAUSED (upward)
 * and when leaving PAUSED for READY (downward), chaining to the parent
 * class in between as GStreamer requires. */
596 static GstStateChangeReturn
597 gst_rtp_vraw_depay_change_state (GstElement * element,
598 GstStateChange transition)
600 GstRtpVRawDepay *rtpvrawdepay;
601 GstStateChangeReturn ret;
603 rtpvrawdepay = GST_RTP_VRAW_DEPAY (element);
605 switch (transition) {
606 case GST_STATE_CHANGE_NULL_TO_READY:
608 case GST_STATE_CHANGE_READY_TO_PAUSED:
/* Start each streaming session from a clean slate. */
609 gst_rtp_vraw_depay_reset (rtpvrawdepay);
615 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
617 switch (transition) {
618 case GST_STATE_CHANGE_PAUSED_TO_READY:
/* Drop any partially assembled frame and the buffer pool on teardown. */
619 gst_rtp_vraw_depay_reset (rtpvrawdepay);
621 case GST_STATE_CHANGE_READY_TO_NULL:
/* Plugin entry helper: register the element under the name "rtpvrawdepay"
 * with SECONDARY rank so it is auto-plugged for matching RTP caps. */
630 gst_rtp_vraw_depay_plugin_init (GstPlugin * plugin)
632 return gst_element_register (plugin, "rtpvrawdepay",
633 GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_DEPAY);