2 * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
17 * Boston, MA 02111-1307, USA.
#include <string.h>
#include <stdlib.h>

#include <gst/rtp/gstrtpbuffer.h>

#include "gstrtpvrawdepay.h"
30 GST_DEBUG_CATEGORY_STATIC (rtpvrawdepay_debug);
31 #define GST_CAT_DEFAULT (rtpvrawdepay_debug)
33 static GstStaticPadTemplate gst_rtp_vraw_depay_src_template =
34 GST_STATIC_PAD_TEMPLATE ("src",
37 GST_STATIC_CAPS ("video/x-raw")
40 static GstStaticPadTemplate gst_rtp_vraw_depay_sink_template =
41 GST_STATIC_PAD_TEMPLATE ("sink",
44 GST_STATIC_CAPS ("application/x-rtp, "
45 "media = (string) \"video\", "
46 "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
47 "clock-rate = (int) 90000, " "encoding-name = (string) \"RAW\"")
50 #define gst_rtp_vraw_depay_parent_class parent_class
51 G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay,
52 GST_TYPE_RTP_BASE_DEPAYLOAD);
54 static gboolean gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload,
56 static GstBuffer *gst_rtp_vraw_depay_process (GstRTPBaseDepayload * depayload,
59 static GstStateChangeReturn gst_rtp_vraw_depay_change_state (GstElement *
60 element, GstStateChange transition);
62 static gboolean gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter,
66 gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
68 GstElementClass *gstelement_class;
69 GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
71 gstelement_class = (GstElementClass *) klass;
72 gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
74 gstelement_class->change_state = gst_rtp_vraw_depay_change_state;
76 gstrtpbasedepayload_class->set_caps = gst_rtp_vraw_depay_setcaps;
77 gstrtpbasedepayload_class->process = gst_rtp_vraw_depay_process;
78 gstrtpbasedepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
80 gst_element_class_add_pad_template (gstelement_class,
81 gst_static_pad_template_get (&gst_rtp_vraw_depay_src_template));
82 gst_element_class_add_pad_template (gstelement_class,
83 gst_static_pad_template_get (&gst_rtp_vraw_depay_sink_template));
85 gst_element_class_set_static_metadata (gstelement_class,
86 "RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
87 "Extracts raw video from RTP packets (RFC 4175)",
88 "Wim Taymans <wim.taymans@gmail.com>");
90 GST_DEBUG_CATEGORY_INIT (rtpvrawdepay_debug, "rtpvrawdepay", 0,
91 "raw video RTP Depayloader");
95 gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay)
100 gst_rtp_vraw_depay_reset (GstRtpVRawDepay * rtpvrawdepay)
102 if (rtpvrawdepay->outbuf) {
103 gst_buffer_unref (rtpvrawdepay->outbuf);
104 rtpvrawdepay->outbuf = NULL;
106 rtpvrawdepay->timestamp = -1;
107 if (rtpvrawdepay->pool) {
108 gst_buffer_pool_set_active (rtpvrawdepay->pool, FALSE);
109 gst_object_unref (rtpvrawdepay->pool);
110 rtpvrawdepay->pool = NULL;
115 gst_rtp_vraw_depay_negotiate_pool (GstRtpVRawDepay * depay, GstCaps * caps,
119 GstBufferPool *pool = NULL;
120 guint size, min, max;
121 GstStructure *config;
123 /* find a pool for the negotiated caps now */
124 query = gst_query_new_allocation (caps, TRUE);
126 if (!gst_pad_peer_query (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), query)) {
127 /* not a problem, we use the defaults of query */
128 GST_DEBUG_OBJECT (depay, "could not get downstream ALLOCATION hints");
131 if (gst_query_get_n_allocation_pools (query) > 0) {
132 /* we got configuration from our peer, parse them */
133 gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
135 GST_DEBUG_OBJECT (depay, "didn't get downstream pool hints");
141 /* we did not get a pool, make one ourselves then */
142 pool = gst_video_buffer_pool_new ();
146 gst_object_unref (depay->pool);
149 config = gst_buffer_pool_get_config (pool);
150 gst_buffer_pool_config_set_params (config, caps, size, min, max);
151 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
152 /* just set the metadata, if the pool can support it we will transparently use
153 * it through the video info API. We could also see if the pool support this
154 * metadata and only activate it then. */
155 gst_buffer_pool_config_add_option (config,
156 GST_BUFFER_POOL_OPTION_VIDEO_META);
159 gst_buffer_pool_set_config (pool, config);
161 gst_buffer_pool_set_active (pool, TRUE);
163 gst_query_unref (query);
169 gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
171 GstStructure *structure;
172 GstRtpVRawDepay *rtpvrawdepay;
175 gint format, width, height, pgroup, xinc, yinc;
180 rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
182 structure = gst_caps_get_structure (caps, 0);
186 if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
187 clock_rate = 90000; /* default */
188 depayload->clock_rate = clock_rate;
190 if (!(str = gst_structure_get_string (structure, "width")))
194 if (!(str = gst_structure_get_string (structure, "height")))
198 /* optional interlace value but we don't handle interlaced
200 if (gst_structure_get_string (structure, "interlace"))
203 if (!(str = gst_structure_get_string (structure, "sampling")))
206 if (!strcmp (str, "RGB")) {
207 format = GST_VIDEO_FORMAT_RGB;
209 } else if (!strcmp (str, "RGBA")) {
210 format = GST_VIDEO_FORMAT_RGBA;
212 } else if (!strcmp (str, "BGR")) {
213 format = GST_VIDEO_FORMAT_BGR;
215 } else if (!strcmp (str, "BGRA")) {
216 format = GST_VIDEO_FORMAT_BGRA;
218 } else if (!strcmp (str, "YCbCr-4:4:4")) {
219 format = GST_VIDEO_FORMAT_AYUV;
221 } else if (!strcmp (str, "YCbCr-4:2:2")) {
222 format = GST_VIDEO_FORMAT_UYVY;
225 } else if (!strcmp (str, "YCbCr-4:2:0")) {
226 format = GST_VIDEO_FORMAT_I420;
229 } else if (!strcmp (str, "YCbCr-4:1:1")) {
230 format = GST_VIDEO_FORMAT_Y41B;
236 gst_video_info_init (&rtpvrawdepay->vinfo);
237 gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
238 GST_VIDEO_INFO_FPS_N (&rtpvrawdepay->vinfo) = 0;
239 GST_VIDEO_INFO_FPS_D (&rtpvrawdepay->vinfo) = 1;
241 rtpvrawdepay->pgroup = pgroup;
242 rtpvrawdepay->xinc = xinc;
243 rtpvrawdepay->yinc = yinc;
245 srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
246 res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
247 gst_caps_unref (srccaps);
249 GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
251 GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d, pgroup %d",
254 /* negotiate a bufferpool */
255 if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, srccaps,
256 &rtpvrawdepay->vinfo)) != GST_FLOW_OK)
264 GST_ERROR_OBJECT (depayload, "no width specified");
269 GST_ERROR_OBJECT (depayload, "no height specified");
274 GST_ERROR_OBJECT (depayload, "interlaced formats not supported yet");
279 GST_ERROR_OBJECT (depayload, "no sampling specified");
284 GST_ERROR_OBJECT (depayload, "unknown sampling format '%s'", str);
289 GST_DEBUG_OBJECT (depayload, "no bufferpool");
295 gst_rtp_vraw_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
297 GstRtpVRawDepay *rtpvrawdepay;
298 guint8 *payload, *yp, *up, *vp, *headers;
300 guint cont, ystride, uvstride, pgroup, payload_len;
301 gint width, height, xinc, yinc;
302 GstRTPBuffer rtp = { NULL };
305 rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
307 gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
309 timestamp = gst_rtp_buffer_get_timestamp (&rtp);
311 if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
315 GST_LOG_OBJECT (depayload, "new frame with timestamp %u", timestamp);
316 /* new timestamp, flush old buffer and create new output buffer */
317 if (rtpvrawdepay->outbuf) {
318 gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
319 rtpvrawdepay->outbuf = NULL;
322 if (gst_pad_check_reconfigure (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload))) {
326 gst_pad_get_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
327 gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
328 &rtpvrawdepay->vinfo);
329 gst_caps_unref (caps);
332 ret = gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &outbuf, NULL);
333 if (G_UNLIKELY (ret != GST_FLOW_OK))
336 /* clear timestamp from alloc... */
337 GST_BUFFER_TIMESTAMP (outbuf) = -1;
339 rtpvrawdepay->outbuf = outbuf;
340 rtpvrawdepay->timestamp = timestamp;
343 if (!gst_video_frame_map (&frame, &rtpvrawdepay->vinfo, rtpvrawdepay->outbuf,
347 /* get pointer and strides of the planes */
348 yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
349 up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
350 vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
352 ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
353 uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
355 pgroup = rtpvrawdepay->pgroup;
356 width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
357 height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
358 xinc = rtpvrawdepay->xinc;
359 yinc = rtpvrawdepay->yinc;
361 payload = gst_rtp_buffer_get_payload (&rtp);
362 payload_len = gst_rtp_buffer_get_payload_len (&rtp);
367 /* skip extended seqnum */
371 /* remember header position */
374 /* find data start */
379 cont = payload[4] & 0x80;
386 guint length, line, offs, plen;
389 /* stop when we run out of data */
390 if (payload_len == 0)
393 /* read length and cont. This should work because we iterated the headers
395 length = (headers[0] << 8) | headers[1];
396 line = ((headers[2] & 0x7f) << 8) | headers[3];
397 offs = ((headers[4] & 0x7f) << 8) | headers[5];
398 cont = headers[4] & 0x80;
401 /* length must be a multiple of pgroup */
402 if (length % pgroup != 0)
405 if (length > payload_len)
406 length = payload_len;
409 if (line > (height - yinc)) {
410 GST_WARNING_OBJECT (depayload, "skipping line %d: out of range", line);
413 if (offs > (width - xinc)) {
414 GST_WARNING_OBJECT (depayload, "skipping offset %d: out of range", offs);
418 /* calculate the maximim amount of bytes we can use per line */
419 if (offs + ((length / pgroup) * xinc) > width) {
420 plen = ((width - offs) * pgroup) / xinc;
421 GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",
426 GST_LOG_OBJECT (depayload,
427 "writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
428 line, offs, payload_len);
430 switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
431 case GST_VIDEO_FORMAT_RGB:
432 case GST_VIDEO_FORMAT_RGBA:
433 case GST_VIDEO_FORMAT_BGR:
434 case GST_VIDEO_FORMAT_BGRA:
435 case GST_VIDEO_FORMAT_UYVY:
436 /* samples are packed just like gstreamer packs them */
438 datap = yp + (line * ystride) + (offs * pgroup);
440 memcpy (datap, payload, plen);
442 case GST_VIDEO_FORMAT_AYUV:
447 datap = yp + (line * ystride) + (offs * 4);
450 /* samples are packed in order Cb-Y-Cr for both interlaced and
451 * progressive frames */
452 for (i = 0; i < plen; i += pgroup) {
461 case GST_VIDEO_FORMAT_I420:
465 guint8 *yd1p, *yd2p, *udp, *vdp, *p;
467 yd1p = yp + (line * ystride) + (offs);
468 yd2p = yd1p + ystride;
469 uvoff = (line / yinc * uvstride) + (offs / xinc);
475 /* line 0/1: Y00-Y01-Y10-Y11-Cb00-Cr00 Y02-Y03-Y12-Y13-Cb01-Cr01 ... */
476 for (i = 0; i < plen; i += pgroup) {
487 case GST_VIDEO_FORMAT_Y41B:
491 guint8 *ydp, *udp, *vdp, *p;
493 ydp = yp + (line * ystride) + (offs);
494 uvoff = (line / yinc * uvstride) + (offs / xinc);
500 /* Samples are packed in order Cb0-Y0-Y1-Cr0-Y2-Y3 for both interlaced
501 * and progressive scan lines */
502 for (i = 0; i < plen; i += pgroup) {
514 goto unknown_sampling;
522 payload_len -= length;
525 gst_video_frame_unmap (&frame);
526 gst_rtp_buffer_unmap (&rtp);
528 if (gst_rtp_buffer_get_marker (&rtp)) {
529 GST_LOG_OBJECT (depayload, "marker, flushing frame");
530 if (rtpvrawdepay->outbuf) {
531 gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
532 rtpvrawdepay->outbuf = NULL;
534 rtpvrawdepay->timestamp = -1;
541 GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
542 (NULL), ("unimplemented sampling"));
543 gst_video_frame_unmap (&frame);
544 gst_rtp_buffer_unmap (&rtp);
549 GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
550 gst_rtp_buffer_unmap (&rtp);
555 GST_ERROR_OBJECT (depayload, "could not map video frame");
556 gst_rtp_buffer_unmap (&rtp);
561 GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
562 gst_video_frame_unmap (&frame);
563 gst_rtp_buffer_unmap (&rtp);
568 GST_WARNING_OBJECT (depayload, "short packet");
569 gst_video_frame_unmap (&frame);
570 gst_rtp_buffer_unmap (&rtp);
576 gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter, GstEvent * event)
579 GstRtpVRawDepay *rtpvrawdepay;
581 rtpvrawdepay = GST_RTP_VRAW_DEPAY (filter);
583 switch (GST_EVENT_TYPE (event)) {
584 case GST_EVENT_FLUSH_STOP:
585 gst_rtp_vraw_depay_reset (rtpvrawdepay);
592 GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
597 static GstStateChangeReturn
598 gst_rtp_vraw_depay_change_state (GstElement * element,
599 GstStateChange transition)
601 GstRtpVRawDepay *rtpvrawdepay;
602 GstStateChangeReturn ret;
604 rtpvrawdepay = GST_RTP_VRAW_DEPAY (element);
606 switch (transition) {
607 case GST_STATE_CHANGE_NULL_TO_READY:
609 case GST_STATE_CHANGE_READY_TO_PAUSED:
610 gst_rtp_vraw_depay_reset (rtpvrawdepay);
616 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
618 switch (transition) {
619 case GST_STATE_CHANGE_PAUSED_TO_READY:
620 gst_rtp_vraw_depay_reset (rtpvrawdepay);
622 case GST_STATE_CHANGE_READY_TO_NULL:
631 gst_rtp_vraw_depay_plugin_init (GstPlugin * plugin)
633 return gst_element_register (plugin, "rtpvrawdepay",
634 GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_DEPAY);