 * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
#include <stdlib.h>
#include <string.h>

#include <gst/rtp/gstrtpbuffer.h>
#include <gst/video/video.h>

#include "gstrtpvrawdepay.h"
/* private debug category for this element; all GST_*_OBJECT macros in this
 * file log to it via GST_CAT_DEFAULT */
GST_DEBUG_CATEGORY_STATIC (rtpvrawdepay_debug);
#define GST_CAT_DEFAULT (rtpvrawdepay_debug)
33 static GstStaticPadTemplate gst_rtp_vraw_depay_src_template =
34 GST_STATIC_PAD_TEMPLATE ("src",
37 GST_STATIC_CAPS ("video/x-raw")
40 static GstStaticPadTemplate gst_rtp_vraw_depay_sink_template =
41 GST_STATIC_PAD_TEMPLATE ("sink",
44 GST_STATIC_CAPS ("application/x-rtp, "
45 "media = (string) \"video\", "
46 "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
47 "clock-rate = (int) 90000, " "encoding-name = (string) \"RAW\"")
/* GObject type boilerplate: GstRtpVRawDepay derives from
 * GstRTPBaseDepayload */
#define gst_rtp_vraw_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay,
    GST_TYPE_RTP_BASE_DEPAYLOAD);
54 static gboolean gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload,
56 static GstBuffer *gst_rtp_vraw_depay_process (GstRTPBaseDepayload * depayload,
59 static GstStateChangeReturn gst_rtp_vraw_depay_change_state (GstElement *
60 element, GstStateChange transition);
62 static gboolean gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter,
66 gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
68 GstElementClass *gstelement_class;
69 GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
71 gstelement_class = (GstElementClass *) klass;
72 gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
74 gstelement_class->change_state = gst_rtp_vraw_depay_change_state;
76 gstrtpbasedepayload_class->set_caps = gst_rtp_vraw_depay_setcaps;
77 gstrtpbasedepayload_class->process = gst_rtp_vraw_depay_process;
78 gstrtpbasedepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
80 gst_element_class_add_pad_template (gstelement_class,
81 gst_static_pad_template_get (&gst_rtp_vraw_depay_src_template));
82 gst_element_class_add_pad_template (gstelement_class,
83 gst_static_pad_template_get (&gst_rtp_vraw_depay_sink_template));
85 gst_element_class_set_details_simple (gstelement_class,
86 "RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
87 "Extracts raw video from RTP packets (RFC 4175)",
88 "Wim Taymans <wim.taymans@gmail.com>");
90 GST_DEBUG_CATEGORY_INIT (rtpvrawdepay_debug, "rtpvrawdepay", 0,
91 "raw video RTP Depayloader");
95 gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay)
97 /* needed because of GST_BOILERPLATE */
101 gst_rtp_vraw_depay_reset (GstRtpVRawDepay * rtpvrawdepay)
103 if (rtpvrawdepay->outbuf) {
104 gst_buffer_unref (rtpvrawdepay->outbuf);
105 rtpvrawdepay->outbuf = NULL;
107 rtpvrawdepay->timestamp = -1;
108 if (rtpvrawdepay->pool) {
109 gst_buffer_pool_set_active (rtpvrawdepay->pool, FALSE);
110 gst_object_unref (rtpvrawdepay->pool);
111 rtpvrawdepay->pool = NULL;
116 gst_rtp_vraw_depay_negotiate_pool (GstRtpVRawDepay * depay, GstCaps * caps,
120 GstBufferPool *pool = NULL;
121 guint size, min, max;
122 GstStructure *config;
124 /* find a pool for the negotiated caps now */
125 query = gst_query_new_allocation (caps, TRUE);
127 if (!gst_pad_peer_query (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), query)) {
128 /* not a problem, we use the defaults of query */
129 GST_DEBUG_OBJECT (depay, "could not get downstream ALLOCATION hints");
132 if (gst_query_get_n_allocation_pools (query) > 0) {
133 /* we got configuration from our peer, parse them */
134 gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
136 GST_DEBUG_OBJECT (depay, "didn't get downstream pool hints");
142 /* we did not get a pool, make one ourselves then */
143 pool = gst_video_buffer_pool_new ();
147 gst_object_unref (depay->pool);
150 config = gst_buffer_pool_get_config (pool);
151 gst_buffer_pool_config_set_params (config, caps, size, min, max);
152 if (gst_query_has_allocation_meta (query, GST_VIDEO_META_API_TYPE)) {
153 /* just set the metadata, if the pool can support it we will transparently use
154 * it through the video info API. We could also see if the pool support this
155 * metadata and only activate it then. */
156 gst_buffer_pool_config_add_option (config,
157 GST_BUFFER_POOL_OPTION_VIDEO_META);
160 gst_buffer_pool_set_config (pool, config);
162 gst_buffer_pool_set_active (pool, TRUE);
164 gst_query_unref (query);
170 gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
172 GstStructure *structure;
173 GstRtpVRawDepay *rtpvrawdepay;
176 gint format, width, height, pgroup, xinc, yinc;
181 rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
183 structure = gst_caps_get_structure (caps, 0);
187 if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
188 clock_rate = 90000; /* default */
189 depayload->clock_rate = clock_rate;
191 if (!(str = gst_structure_get_string (structure, "width")))
195 if (!(str = gst_structure_get_string (structure, "height")))
199 /* optional interlace value but we don't handle interlaced
201 if (gst_structure_get_string (structure, "interlace"))
204 if (!(str = gst_structure_get_string (structure, "sampling")))
207 if (!strcmp (str, "RGB")) {
208 format = GST_VIDEO_FORMAT_RGB;
210 } else if (!strcmp (str, "RGBA")) {
211 format = GST_VIDEO_FORMAT_RGBA;
213 } else if (!strcmp (str, "BGR")) {
214 format = GST_VIDEO_FORMAT_BGR;
216 } else if (!strcmp (str, "BGRA")) {
217 format = GST_VIDEO_FORMAT_BGRA;
219 } else if (!strcmp (str, "YCbCr-4:4:4")) {
220 format = GST_VIDEO_FORMAT_AYUV;
222 } else if (!strcmp (str, "YCbCr-4:2:2")) {
223 format = GST_VIDEO_FORMAT_UYVY;
226 } else if (!strcmp (str, "YCbCr-4:2:0")) {
227 format = GST_VIDEO_FORMAT_I420;
230 } else if (!strcmp (str, "YCbCr-4:1:1")) {
231 format = GST_VIDEO_FORMAT_Y41B;
237 gst_video_info_init (&rtpvrawdepay->vinfo);
238 gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
239 GST_VIDEO_INFO_FPS_N (&rtpvrawdepay->vinfo) = 0;
240 GST_VIDEO_INFO_FPS_D (&rtpvrawdepay->vinfo) = 1;
242 rtpvrawdepay->pgroup = pgroup;
243 rtpvrawdepay->xinc = xinc;
244 rtpvrawdepay->yinc = yinc;
246 srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
247 res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
248 gst_caps_unref (srccaps);
250 GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
252 GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d, pgroup %d",
255 /* negotiate a bufferpool */
256 if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
257 &rtpvrawdepay->vinfo)) != GST_FLOW_OK)
265 GST_ERROR_OBJECT (depayload, "no width specified");
270 GST_ERROR_OBJECT (depayload, "no height specified");
275 GST_ERROR_OBJECT (depayload, "interlaced formats not supported yet");
280 GST_ERROR_OBJECT (depayload, "no sampling specified");
285 GST_ERROR_OBJECT (depayload, "unknown sampling format '%s'", str);
290 GST_DEBUG_OBJECT (depayload, "no bufferpool");
296 gst_rtp_vraw_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
298 GstRtpVRawDepay *rtpvrawdepay;
299 guint8 *payload, *yp, *up, *vp, *headers;
301 guint cont, ystride, uvstride, pgroup, payload_len;
302 gint width, height, xinc, yinc;
303 GstRTPBuffer rtp = { NULL };
306 rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
308 gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
310 timestamp = gst_rtp_buffer_get_timestamp (&rtp);
312 if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
316 GST_LOG_OBJECT (depayload, "new frame with timestamp %u", timestamp);
317 /* new timestamp, flush old buffer and create new output buffer */
318 if (rtpvrawdepay->outbuf) {
319 gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
320 rtpvrawdepay->outbuf = NULL;
323 if (gst_pad_check_reconfigure (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload))) {
327 gst_pad_get_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
328 gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
329 &rtpvrawdepay->vinfo);
330 gst_caps_unref (caps);
333 ret = gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &outbuf, NULL);
334 if (G_UNLIKELY (ret != GST_FLOW_OK))
337 /* clear timestamp from alloc... */
338 GST_BUFFER_TIMESTAMP (outbuf) = -1;
340 rtpvrawdepay->outbuf = outbuf;
341 rtpvrawdepay->timestamp = timestamp;
344 if (!gst_video_frame_map (&frame, &rtpvrawdepay->vinfo, rtpvrawdepay->outbuf,
348 /* get pointer and strides of the planes */
349 yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
350 up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
351 vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
353 ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
354 uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
356 pgroup = rtpvrawdepay->pgroup;
357 width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
358 height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
359 xinc = rtpvrawdepay->xinc;
360 yinc = rtpvrawdepay->yinc;
362 payload = gst_rtp_buffer_get_payload (&rtp);
363 payload_len = gst_rtp_buffer_get_payload_len (&rtp);
368 /* skip extended seqnum */
372 /* remember header position */
375 /* find data start */
380 cont = payload[4] & 0x80;
387 guint length, line, offs, plen;
390 /* stop when we run out of data */
391 if (payload_len == 0)
394 /* read length and cont. This should work because we iterated the headers
396 length = (headers[0] << 8) | headers[1];
397 line = ((headers[2] & 0x7f) << 8) | headers[3];
398 offs = ((headers[4] & 0x7f) << 8) | headers[5];
399 cont = headers[4] & 0x80;
402 /* length must be a multiple of pgroup */
403 if (length % pgroup != 0)
406 if (length > payload_len)
407 length = payload_len;
410 if (line > (height - yinc)) {
411 GST_WARNING_OBJECT (depayload, "skipping line %d: out of range", line);
414 if (offs > (width - xinc)) {
415 GST_WARNING_OBJECT (depayload, "skipping offset %d: out of range", offs);
419 /* calculate the maximim amount of bytes we can use per line */
420 if (offs + ((length / pgroup) * xinc) > width) {
421 plen = ((width - offs) * pgroup) / xinc;
422 GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",
427 GST_LOG_OBJECT (depayload,
428 "writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
429 line, offs, payload_len);
431 switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
432 case GST_VIDEO_FORMAT_RGB:
433 case GST_VIDEO_FORMAT_RGBA:
434 case GST_VIDEO_FORMAT_BGR:
435 case GST_VIDEO_FORMAT_BGRA:
436 case GST_VIDEO_FORMAT_UYVY:
437 /* samples are packed just like gstreamer packs them */
439 datap = yp + (line * ystride) + (offs * pgroup);
441 memcpy (datap, payload, plen);
443 case GST_VIDEO_FORMAT_AYUV:
448 datap = yp + (line * ystride) + (offs * 4);
451 /* samples are packed in order Cb-Y-Cr for both interlaced and
452 * progressive frames */
453 for (i = 0; i < plen; i += pgroup) {
462 case GST_VIDEO_FORMAT_I420:
466 guint8 *yd1p, *yd2p, *udp, *vdp, *p;
468 yd1p = yp + (line * ystride) + (offs);
469 yd2p = yd1p + ystride;
470 uvoff = (line / yinc * uvstride) + (offs / xinc);
476 /* line 0/1: Y00-Y01-Y10-Y11-Cb00-Cr00 Y02-Y03-Y12-Y13-Cb01-Cr01 ... */
477 for (i = 0; i < plen; i += pgroup) {
488 case GST_VIDEO_FORMAT_Y41B:
492 guint8 *ydp, *udp, *vdp, *p;
494 ydp = yp + (line * ystride) + (offs);
495 uvoff = (line / yinc * uvstride) + (offs / xinc);
501 /* Samples are packed in order Cb0-Y0-Y1-Cr0-Y2-Y3 for both interlaced
502 * and progressive scan lines */
503 for (i = 0; i < plen; i += pgroup) {
515 goto unknown_sampling;
523 payload_len -= length;
526 gst_video_frame_unmap (&frame);
527 gst_rtp_buffer_unmap (&rtp);
529 if (gst_rtp_buffer_get_marker (&rtp)) {
530 GST_LOG_OBJECT (depayload, "marker, flushing frame");
531 if (rtpvrawdepay->outbuf) {
532 gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
533 rtpvrawdepay->outbuf = NULL;
535 rtpvrawdepay->timestamp = -1;
542 GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
543 (NULL), ("unimplemented sampling"));
544 gst_video_frame_unmap (&frame);
545 gst_rtp_buffer_unmap (&rtp);
550 GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
551 gst_rtp_buffer_unmap (&rtp);
556 GST_ERROR_OBJECT (depayload, "could not map video frame");
557 gst_rtp_buffer_unmap (&rtp);
562 GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
563 gst_video_frame_unmap (&frame);
564 gst_rtp_buffer_unmap (&rtp);
569 GST_WARNING_OBJECT (depayload, "short packet");
570 gst_video_frame_unmap (&frame);
571 gst_rtp_buffer_unmap (&rtp);
577 gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter, GstEvent * event)
580 GstRtpVRawDepay *rtpvrawdepay;
582 rtpvrawdepay = GST_RTP_VRAW_DEPAY (filter);
584 switch (GST_EVENT_TYPE (event)) {
585 case GST_EVENT_FLUSH_STOP:
586 gst_rtp_vraw_depay_reset (rtpvrawdepay);
593 GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
598 static GstStateChangeReturn
599 gst_rtp_vraw_depay_change_state (GstElement * element,
600 GstStateChange transition)
602 GstRtpVRawDepay *rtpvrawdepay;
603 GstStateChangeReturn ret;
605 rtpvrawdepay = GST_RTP_VRAW_DEPAY (element);
607 switch (transition) {
608 case GST_STATE_CHANGE_NULL_TO_READY:
610 case GST_STATE_CHANGE_READY_TO_PAUSED:
611 gst_rtp_vraw_depay_reset (rtpvrawdepay);
617 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
619 switch (transition) {
620 case GST_STATE_CHANGE_PAUSED_TO_READY:
621 gst_rtp_vraw_depay_reset (rtpvrawdepay);
623 case GST_STATE_CHANGE_READY_TO_NULL:
632 gst_rtp_vraw_depay_plugin_init (GstPlugin * plugin)
634 return gst_element_register (plugin, "rtpvrawdepay",
635 GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_DEPAY);