2 * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
24 #include <gst/rtp/gstrtpbuffer.h>
25 #include <gst/video/video.h>
29 #include "gstrtpelements.h"
30 #include "gstrtpvrawdepay.h"
31 #include "gstrtputils.h"
/* Debug category for this element; GST_CAT_DEFAULT routes all GST_LOG /
 * GST_DEBUG statements in this file to it. */
GST_DEBUG_CATEGORY_STATIC (rtpvrawdepay_debug);
#define GST_CAT_DEFAULT (rtpvrawdepay_debug)
36 static GstStaticPadTemplate gst_rtp_vraw_depay_src_template =
37 GST_STATIC_PAD_TEMPLATE ("src",
40 GST_STATIC_CAPS ("video/x-raw")
43 static GstStaticPadTemplate gst_rtp_vraw_depay_sink_template =
44 GST_STATIC_PAD_TEMPLATE ("sink",
47 GST_STATIC_CAPS ("application/x-rtp, "
48 "media = (string) \"video\", "
49 "clock-rate = (int) 90000, "
50 "encoding-name = (string) \"RAW\", "
51 "sampling = (string) { \"RGB\", \"RGBA\", \"BGR\", \"BGRA\", "
52 "\"YCbCr-4:4:4\", \"YCbCr-4:2:2\", \"YCbCr-4:2:0\", "
54 /* we cannot express these as strings
55 * "width = (string) [1 32767],"
56 * "height = (string) [1 32767],"
58 "depth = (string) { \"8\", \"10\", \"12\", \"16\" }")
/* GObject boilerplate: derive GstRtpVRawDepay from GstRTPBaseDepayload and
 * register the "rtpvrawdepay" element at SECONDARY rank, running the shared
 * rtp_element_init() setup when the element is first registered. */
#define gst_rtp_vraw_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay,
    GST_TYPE_RTP_BASE_DEPAYLOAD);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvrawdepay, "rtpvrawdepay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_DEPAY, rtp_element_init (plugin));
67 static gboolean gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload,
69 static GstBuffer *gst_rtp_vraw_depay_process_packet (GstRTPBaseDepayload *
70 depay, GstRTPBuffer * rtp);
72 static GstStateChangeReturn gst_rtp_vraw_depay_change_state (GstElement *
73 element, GstStateChange transition);
75 static gboolean gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter,
79 gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
81 GstElementClass *gstelement_class;
82 GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
84 gstelement_class = (GstElementClass *) klass;
85 gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
87 gstelement_class->change_state = gst_rtp_vraw_depay_change_state;
89 gstrtpbasedepayload_class->set_caps = gst_rtp_vraw_depay_setcaps;
90 gstrtpbasedepayload_class->process_rtp_packet =
91 gst_rtp_vraw_depay_process_packet;
92 gstrtpbasedepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
94 gst_element_class_add_static_pad_template (gstelement_class,
95 &gst_rtp_vraw_depay_src_template);
96 gst_element_class_add_static_pad_template (gstelement_class,
97 &gst_rtp_vraw_depay_sink_template);
99 gst_element_class_set_static_metadata (gstelement_class,
100 "RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
101 "Extracts raw video from RTP packets (RFC 4175)",
102 "Wim Taymans <wim.taymans@gmail.com>");
104 GST_DEBUG_CATEGORY_INIT (rtpvrawdepay_debug, "rtpvrawdepay", 0,
105 "raw video RTP Depayloader");
109 gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay)
114 gst_rtp_vraw_depay_reset (GstRtpVRawDepay * rtpvrawdepay, gboolean full)
116 if (rtpvrawdepay->outbuf) {
117 gst_video_frame_unmap (&rtpvrawdepay->frame);
118 gst_buffer_unref (rtpvrawdepay->outbuf);
119 rtpvrawdepay->outbuf = NULL;
121 rtpvrawdepay->timestamp = -1;
123 if (full && rtpvrawdepay->pool) {
124 gst_buffer_pool_set_active (rtpvrawdepay->pool, FALSE);
125 gst_object_unref (rtpvrawdepay->pool);
126 rtpvrawdepay->pool = NULL;
131 gst_rtp_vraw_depay_negotiate_pool (GstRtpVRawDepay * depay, GstCaps * caps,
135 GstBufferPool *pool = NULL;
136 guint size, min, max;
137 GstStructure *config;
139 /* find a pool for the negotiated caps now */
140 query = gst_query_new_allocation (caps, TRUE);
142 if (!gst_pad_peer_query (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), query)) {
143 /* not a problem, we use the defaults of query */
144 GST_DEBUG_OBJECT (depay, "could not get downstream ALLOCATION hints");
147 if (gst_query_get_n_allocation_pools (query) > 0) {
148 /* we got configuration from our peer, parse them */
149 gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
151 GST_DEBUG_OBJECT (depay, "didn't get downstream pool hints");
157 /* we did not get a pool, make one ourselves then */
158 pool = gst_video_buffer_pool_new ();
162 gst_object_unref (depay->pool);
165 config = gst_buffer_pool_get_config (pool);
166 gst_buffer_pool_config_set_params (config, caps, size, min, max);
167 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
168 /* just set the metadata, if the pool can support it we will transparently use
169 * it through the video info API. We could also see if the pool support this
170 * metadata and only activate it then. */
171 gst_buffer_pool_config_add_option (config,
172 GST_BUFFER_POOL_OPTION_VIDEO_META);
175 gst_buffer_pool_set_config (pool, config);
177 gst_buffer_pool_set_active (pool, TRUE);
179 gst_query_unref (query);
185 gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
187 GstStructure *structure;
188 GstRtpVRawDepay *rtpvrawdepay;
191 gint format, width, height, depth, pgroup, xinc, yinc;
196 rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
198 structure = gst_caps_get_structure (caps, 0);
202 if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
203 clock_rate = 90000; /* default */
204 depayload->clock_rate = clock_rate;
206 if (!(str = gst_structure_get_string (structure, "width")))
210 if (!(str = gst_structure_get_string (structure, "height")))
214 if (!(str = gst_structure_get_string (structure, "depth")))
218 /* optional interlace value but we don't handle interlaced
220 if (gst_structure_get_string (structure, "interlace"))
223 if (!(str = gst_structure_get_string (structure, "sampling")))
226 if (!strcmp (str, "RGB")) {
227 format = GST_VIDEO_FORMAT_RGB;
229 } else if (!strcmp (str, "RGBA")) {
230 format = GST_VIDEO_FORMAT_RGBA;
232 } else if (!strcmp (str, "BGR")) {
233 format = GST_VIDEO_FORMAT_BGR;
235 } else if (!strcmp (str, "BGRA")) {
236 format = GST_VIDEO_FORMAT_BGRA;
238 } else if (!strcmp (str, "YCbCr-4:4:4")) {
239 format = GST_VIDEO_FORMAT_AYUV;
241 } else if (!strcmp (str, "YCbCr-4:2:2")) {
243 format = GST_VIDEO_FORMAT_UYVY;
245 } else if (depth == 10) {
246 format = GST_VIDEO_FORMAT_UYVP;
251 } else if (!strcmp (str, "YCbCr-4:2:0")) {
252 format = GST_VIDEO_FORMAT_I420;
255 } else if (!strcmp (str, "YCbCr-4:1:1")) {
256 format = GST_VIDEO_FORMAT_Y41B;
263 gst_video_info_init (&rtpvrawdepay->vinfo);
264 gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
265 GST_VIDEO_INFO_FPS_N (&rtpvrawdepay->vinfo) = 0;
266 GST_VIDEO_INFO_FPS_D (&rtpvrawdepay->vinfo) = 1;
268 rtpvrawdepay->pgroup = pgroup;
269 rtpvrawdepay->xinc = xinc;
270 rtpvrawdepay->yinc = yinc;
272 srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
273 res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
274 gst_caps_unref (srccaps);
276 GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
278 GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d, pgroup %d",
281 /* negotiate a bufferpool */
282 if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, srccaps,
283 &rtpvrawdepay->vinfo)) != GST_FLOW_OK)
291 GST_ERROR_OBJECT (depayload, "no width specified");
296 GST_ERROR_OBJECT (depayload, "no height specified");
301 GST_ERROR_OBJECT (depayload, "no depth specified");
306 GST_ERROR_OBJECT (depayload, "interlaced formats not supported yet");
311 GST_ERROR_OBJECT (depayload, "no sampling specified");
316 GST_ERROR_OBJECT (depayload, "unknown sampling format '%s'", str);
321 GST_DEBUG_OBJECT (depayload, "no bufferpool");
327 gst_rtp_vraw_depay_process_packet (GstRTPBaseDepayload * depayload,
330 GstRtpVRawDepay *rtpvrawdepay;
331 guint8 *payload, *p0, *yp, *up, *vp, *headers;
333 guint cont, ystride, uvstride, pgroup, payload_len;
334 gint width, height, xinc, yinc;
335 GstVideoFrame *frame;
337 GstBuffer *outbuf = NULL;
339 rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
341 timestamp = gst_rtp_buffer_get_timestamp (rtp);
343 if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
344 GstBuffer *new_buffer;
347 GST_LOG_OBJECT (depayload, "new frame with timestamp %u", timestamp);
348 /* new timestamp, flush old buffer and create new output buffer */
349 if (rtpvrawdepay->outbuf) {
350 gst_video_frame_unmap (&rtpvrawdepay->frame);
351 gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
352 rtpvrawdepay->outbuf = NULL;
355 if (gst_pad_check_reconfigure (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload))) {
359 gst_pad_get_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
360 gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
361 &rtpvrawdepay->vinfo);
362 gst_caps_unref (caps);
366 gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &new_buffer, NULL);
368 if (G_UNLIKELY (ret != GST_FLOW_OK))
371 /* clear timestamp from alloc... */
372 GST_BUFFER_PTS (new_buffer) = -1;
374 if (!gst_video_frame_map (&rtpvrawdepay->frame, &rtpvrawdepay->vinfo,
375 new_buffer, GST_MAP_WRITE | GST_VIDEO_FRAME_MAP_FLAG_NO_REF)) {
376 gst_buffer_unref (new_buffer);
380 rtpvrawdepay->outbuf = new_buffer;
381 rtpvrawdepay->timestamp = timestamp;
384 frame = &rtpvrawdepay->frame;
386 g_assert (frame->buffer != NULL);
388 /* get pointer and strides of the planes */
389 p0 = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
390 yp = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
391 up = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
392 vp = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
394 ystride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
395 uvstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);
397 pgroup = rtpvrawdepay->pgroup;
398 width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
399 height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
400 xinc = rtpvrawdepay->xinc;
401 yinc = rtpvrawdepay->yinc;
403 payload = gst_rtp_buffer_get_payload (rtp);
404 payload_len = gst_rtp_buffer_get_payload_len (rtp);
409 /* skip extended seqnum */
413 /* remember header position */
416 gst_rtp_copy_video_meta (rtpvrawdepay, frame->buffer, rtp->buffer);
418 /* find data start */
423 cont = payload[4] & 0x80;
430 guint length, line, offs, plen;
433 /* stop when we run out of data */
434 if (payload_len == 0)
437 /* read length and cont. This should work because we iterated the headers
439 length = (headers[0] << 8) | headers[1];
440 line = ((headers[2] & 0x7f) << 8) | headers[3];
441 offs = ((headers[4] & 0x7f) << 8) | headers[5];
442 cont = headers[4] & 0x80;
445 /* length must be a multiple of pgroup */
446 if (length % pgroup != 0)
449 if (length > payload_len)
450 length = payload_len;
453 if (line > (height - yinc)) {
454 GST_WARNING_OBJECT (depayload, "skipping line %d: out of range", line);
457 if (offs > (width - xinc)) {
458 GST_WARNING_OBJECT (depayload, "skipping offset %d: out of range", offs);
462 /* calculate the maximum amount of bytes we can use per line */
463 if (offs + ((length / pgroup) * xinc) > width) {
464 plen = ((width - offs) * pgroup) / xinc;
465 GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",
470 GST_LOG_OBJECT (depayload,
471 "writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
472 line, offs, payload_len);
474 switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
475 case GST_VIDEO_FORMAT_RGB:
476 case GST_VIDEO_FORMAT_RGBA:
477 case GST_VIDEO_FORMAT_BGR:
478 case GST_VIDEO_FORMAT_BGRA:
479 case GST_VIDEO_FORMAT_UYVY:
480 case GST_VIDEO_FORMAT_UYVP:
481 /* samples are packed just like gstreamer packs them */
483 datap = p0 + (line * ystride) + (offs * pgroup);
485 memcpy (datap, payload, plen);
487 case GST_VIDEO_FORMAT_AYUV:
492 datap = p0 + (line * ystride) + (offs * 4);
495 /* samples are packed in order Cb-Y-Cr for both interlaced and
496 * progressive frames */
497 for (i = 0; i < plen; i += pgroup) {
506 case GST_VIDEO_FORMAT_I420:
510 guint8 *yd1p, *yd2p, *udp, *vdp, *p;
512 yd1p = yp + (line * ystride) + (offs);
513 yd2p = yd1p + ystride;
514 uvoff = (line / yinc * uvstride) + (offs / xinc);
520 /* line 0/1: Y00-Y01-Y10-Y11-Cb00-Cr00 Y02-Y03-Y12-Y13-Cb01-Cr01 ... */
521 for (i = 0; i < plen; i += pgroup) {
532 case GST_VIDEO_FORMAT_Y41B:
536 guint8 *ydp, *udp, *vdp, *p;
538 ydp = yp + (line * ystride) + (offs);
539 uvoff = (line / yinc * uvstride) + (offs / xinc);
545 /* Samples are packed in order Cb0-Y0-Y1-Cr0-Y2-Y3 for both interlaced
546 * and progressive scan lines */
547 for (i = 0; i < plen; i += pgroup) {
559 goto unknown_sampling;
567 payload_len -= length;
570 marker = gst_rtp_buffer_get_marker (rtp);
573 GST_LOG_OBJECT (depayload, "marker, flushing frame");
574 gst_video_frame_unmap (&rtpvrawdepay->frame);
575 outbuf = rtpvrawdepay->outbuf;
576 rtpvrawdepay->outbuf = NULL;
577 rtpvrawdepay->timestamp = -1;
584 GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
585 (NULL), ("unimplemented sampling"));
590 GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
595 GST_ERROR_OBJECT (depayload, "could not map video frame");
600 GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
605 GST_WARNING_OBJECT (depayload, "short packet");
611 gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter, GstEvent * event)
614 GstRtpVRawDepay *rtpvrawdepay;
616 rtpvrawdepay = GST_RTP_VRAW_DEPAY (filter);
618 switch (GST_EVENT_TYPE (event)) {
619 case GST_EVENT_FLUSH_STOP:
620 gst_rtp_vraw_depay_reset (rtpvrawdepay, FALSE);
627 GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
632 static GstStateChangeReturn
633 gst_rtp_vraw_depay_change_state (GstElement * element,
634 GstStateChange transition)
636 GstRtpVRawDepay *rtpvrawdepay;
637 GstStateChangeReturn ret;
639 rtpvrawdepay = GST_RTP_VRAW_DEPAY (element);
641 switch (transition) {
642 case GST_STATE_CHANGE_NULL_TO_READY:
644 case GST_STATE_CHANGE_READY_TO_PAUSED:
645 gst_rtp_vraw_depay_reset (rtpvrawdepay, TRUE);
651 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
653 switch (transition) {
654 case GST_STATE_CHANGE_PAUSED_TO_READY:
655 gst_rtp_vraw_depay_reset (rtpvrawdepay, TRUE);
657 case GST_STATE_CHANGE_READY_TO_NULL: