2 * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
26 #include <gst/rtp/gstrtpbuffer.h>
28 #include "gstrtpvrawpay.h"
30 GST_DEBUG_CATEGORY_STATIC (rtpvrawpay_debug);
31 #define GST_CAT_DEFAULT (rtpvrawpay_debug)
/* Sink pad template: accepts raw video in the packed/planar formats the
 * payloader can map to RFC 4175 sampling strings.  Width/height are capped
 * at 32767 because the RFC 4175 caps express them as 15-bit-safe strings.
 * NOTE(review): the pad direction/presence arguments of
 * GST_STATIC_PAD_TEMPLATE are missing from this extracted chunk. */
33 static GstStaticPadTemplate gst_rtp_vraw_pay_sink_template =
34 GST_STATIC_PAD_TEMPLATE ("sink",
37 GST_STATIC_CAPS ("video/x-raw, "
38 "format = (string) { RGB, RGBA, BGR, BGRA, AYUV, UYVY, I420, Y41B, UYVP }, "
39 "width = (int) [ 1, 32767 ], " "height = (int) [ 1, 32767 ]; ")
/* Source pad template: application/x-rtp caps advertising RFC 4175 raw
 * video ("RAW" encoding, 90 kHz clock, dynamic payload type).  The
 * sampling/depth/colorimetry fields mirror what setcaps can produce.
 * NOTE(review): some caps-string lines and the closing arguments are
 * missing from this extracted chunk. */
42 static GstStaticPadTemplate gst_rtp_vraw_pay_src_template =
43 GST_STATIC_PAD_TEMPLATE ("src",
46 GST_STATIC_CAPS ("application/x-rtp, "
47 "media = (string) \"video\", "
48 "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
49 "clock-rate = (int) 90000, "
50 "encoding-name = (string) \"RAW\","
51 "sampling = (string) { \"RGB\", \"RGBA\", \"BGR\", \"BGRA\", "
52 "\"YCbCr-4:4:4\", \"YCbCr-4:2:2\", \"YCbCr-4:2:0\", "
/* width/height cannot be expressed as integer ranges here because RFC 4175
 * caps carry them as strings; they are filled in per-stream by setcaps. */
54 /* we cannot express these as strings
55 * "width = (string) [1 32767],"
56 * "height = (string) [1 32767],"
58 "depth = (string) { \"8\", \"10\", \"12\", \"16\" },"
59 "colorimetry = (string) { \"BT601-5\", \"BT709-2\", \"SMPTE240M\" }"
63 * chroma-position = (string)
/* Forward declarations for the GstRTPBasePayload vfuncs implemented below,
 * followed by the GObject type registration boilerplate. */
69 static gboolean gst_rtp_vraw_pay_setcaps (GstRTPBasePayload * payload,
71 static GstFlowReturn gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload *
72 payload, GstBuffer * buffer);
74 G_DEFINE_TYPE (GstRtpVRawPay, gst_rtp_vraw_pay, GST_TYPE_RTP_BASE_PAYLOAD)
/* Class init: hook up the set_caps/handle_buffer vfuncs of the base
 * payloader, register the static pad templates and element metadata, and
 * initialise the debug category.
 * NOTE(review): the function's braces are missing from this extracted
 * chunk; code lines are otherwise kept verbatim. */
76 static void gst_rtp_vraw_pay_class_init (GstRtpVRawPayClass * klass)
78 GstRTPBasePayloadClass *gstrtpbasepayload_class;
79 GstElementClass *gstelement_class;
/* klass is cast to both parent class views; no new vfunc table is made. */
81 gstelement_class = (GstElementClass *) klass;
82 gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
/* All payloading work happens in these two overrides. */
84 gstrtpbasepayload_class->set_caps = gst_rtp_vraw_pay_setcaps;
85 gstrtpbasepayload_class->handle_buffer = gst_rtp_vraw_pay_handle_buffer;
87 gst_element_class_add_pad_template (gstelement_class,
88 gst_static_pad_template_get (&gst_rtp_vraw_pay_src_template));
89 gst_element_class_add_pad_template (gstelement_class,
90 gst_static_pad_template_get (&gst_rtp_vraw_pay_sink_template));
92 gst_element_class_set_static_metadata (gstelement_class,
93 "RTP Raw Video payloader", "Codec/Payloader/Network/RTP",
94 "Payload raw video as RTP packets (RFC 4175)",
95 "Wim Taymans <wim.taymans@gmail.com>");
97 GST_DEBUG_CATEGORY_INIT (rtpvrawpay_debug, "rtpvrawpay", 0,
98 "Raw video RTP Payloader");
/* Instance init.  No per-instance setup is visible in this extracted chunk
 * (only the signature line survives); state is filled in by setcaps. */
102 gst_rtp_vraw_pay_init (GstRtpVRawPay * rtpvrawpay)
/* set_caps vfunc: parse the negotiated raw-video caps into a GstVideoInfo,
 * translate the video format into the RFC 4175 "sampling"/"depth" caps
 * strings, cache per-format packing parameters (pgroup, xinc, yinc, depth)
 * on the payloader, and configure the output RTP caps (media "video",
 * encoding "RAW", 90 kHz clock).  Returns the result of setting outcaps.
 * NOTE(review): several lines are missing from this extracted chunk —
 * declarations of info/res/depth/wstr/hstr, the per-case pgroup/xinc/yinc
 * assignments and break statements, the default/error labels, and the
 * g_free of wstr/hstr.  Code lines below are kept verbatim. */
107 gst_rtp_vraw_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
109 GstRtpVRawPay *rtpvrawpay;
111 gint pgroup, xinc, yinc;
112 const gchar *depthstr, *samplingstr, *colorimetrystr;
117 rtpvrawpay = GST_RTP_VRAW_PAY (payload);
/* Reject caps that do not parse as raw video (error path logs below). */
119 if (!gst_video_info_from_caps (&info, caps))
/* Cache the parsed info; handle_buffer reads it for every frame. */
122 rtpvrawpay->vinfo = info;
/* Map GStreamer colorimetry onto the RFC 4175 colorimetry strings. */
124 if (gst_video_colorimetry_matches (&info.colorimetry,
125 GST_VIDEO_COLORIMETRY_BT601)) {
126 colorimetrystr = "BT601-5";
127 } else if (gst_video_colorimetry_matches (&info.colorimetry,
128 GST_VIDEO_COLORIMETRY_BT709)) {
129 colorimetrystr = "BT709-2";
130 } else if (gst_video_colorimetry_matches (&info.colorimetry,
131 GST_VIDEO_COLORIMETRY_SMPTE240M)) {
132 colorimetrystr = "SMPTE240M";
/* Fallback when nothing matches: this line appears to be the else branch
 * (its own else keyword is lost to extraction) — SMPTE240M is used. */
134 colorimetrystr = "SMPTE240M";
139 /* these values are the only thing we can do */
/* Per-format sampling string selection; each case also sets pgroup/xinc/
 * yinc (and presumably depth) in lines missing from this chunk. */
143 switch (GST_VIDEO_INFO_FORMAT (&info)) {
144 case GST_VIDEO_FORMAT_RGBA:
145 samplingstr = "RGBA";
148 case GST_VIDEO_FORMAT_BGRA:
149 samplingstr = "BGRA";
152 case GST_VIDEO_FORMAT_RGB:
156 case GST_VIDEO_FORMAT_BGR:
160 case GST_VIDEO_FORMAT_AYUV:
161 samplingstr = "YCbCr-4:4:4";
164 case GST_VIDEO_FORMAT_UYVY:
165 samplingstr = "YCbCr-4:2:2";
169 case GST_VIDEO_FORMAT_Y41B:
170 samplingstr = "YCbCr-4:1:1";
174 case GST_VIDEO_FORMAT_I420:
175 samplingstr = "YCbCr-4:2:0";
/* UYVP is 10-bit packed 4:2:2; it shares the 4:2:2 sampling string. */
179 case GST_VIDEO_FORMAT_UYVP:
180 samplingstr = "YCbCr-4:2:2";
191 if (GST_VIDEO_INFO_IS_INTERLACED (&info)) {
/* Persist the packing parameters used by handle_buffer. */
195 rtpvrawpay->pgroup = pgroup;
196 rtpvrawpay->xinc = xinc;
197 rtpvrawpay->yinc = yinc;
198 rtpvrawpay->depth = depth;
200 GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %s",
201 GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info), samplingstr);
202 GST_DEBUG_OBJECT (payload, "xinc %d, yinc %d, pgroup %d", xinc, yinc, pgroup);
/* RFC 4175 carries width/height as strings in the RTP caps. */
204 wstr = g_strdup_printf ("%d", GST_VIDEO_INFO_WIDTH (&info));
205 hstr = g_strdup_printf ("%d", GST_VIDEO_INFO_HEIGHT (&info));
207 gst_rtp_base_payload_set_options (payload, "video", TRUE, "RAW", 90000);
/* Interlaced streams additionally signal interlace=true in the outcaps. */
208 if (GST_VIDEO_INFO_IS_INTERLACED (&info)) {
209 res = gst_rtp_base_payload_set_outcaps (payload, "sampling", G_TYPE_STRING,
210 samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
211 wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
212 colorimetrystr, "interlace", G_TYPE_STRING, "true", NULL);
214 res = gst_rtp_base_payload_set_outcaps (payload, "sampling", G_TYPE_STRING,
215 samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
216 wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
217 colorimetrystr, NULL);
/* Error paths (labels themselves are missing from this chunk). */
227 GST_ERROR_OBJECT (payload, "could not parse caps");
232 GST_ERROR_OBJECT (payload, "unknown caps format");
/* handle_buffer vfunc: split one raw video frame into RTP packets per
 * RFC 4175.  Each packet is built in two passes over the same payload
 * buffer: pass 1 writes the chain of 6-byte line headers (length, line
 * number + field bit, offset + continuation bit); pass 2 walks those
 * headers again and copies/repacks the pixel data they describe.  The RTP
 * marker bit is set on the packet that completes a field/frame.
 * NOTE(review): many lines are missing from this extracted chunk
 * (declarations of frame/mtu/width/height/line/offset/field/out/lin/offs,
 * loop closings, break statements, and the done label) — code lines below
 * are kept verbatim, so brace balance reflects the extraction, not the
 * original file. */
238 gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
240 GstRtpVRawPay *rtpvrawpay;
241 GstFlowReturn ret = GST_FLOW_OK;
243 guint8 *yp, *up, *vp;
244 guint ystride, uvstride;
251 GstRTPBuffer rtp = { NULL, };
253 rtpvrawpay = GST_RTP_VRAW_PAY (payload);
/* Map the frame read-only using the info cached by setcaps. */
255 gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ);
257 GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
258 gst_buffer_get_size (buffer));
260 /* get pointer and strides of the planes */
261 yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
262 up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
263 vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
265 ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
266 uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
268 mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);
270 /* amount of bytes for one pixel */
271 pgroup = rtpvrawpay->pgroup;
272 width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
273 height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);
275 interlaced = GST_VIDEO_INFO_IS_INTERLACED (&rtpvrawpay->vinfo);
277 /* start with line 0, offset 0 */
/* Progressive content runs the field loop once; interlaced runs it twice
 * (field 0 and field 1). */
278 for (field = 0; field < 1 + interlaced; field++) {
282 /* write all lines */
283 while (line < height) {
286 guint8 *outdata, *headers;
288 guint length, cont, pixels;
290 /* get the max allowed payload length size, we try to fill the complete MTU */
291 left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0);
292 out = gst_rtp_buffer_new_allocate (left, 0, 0);
/* First field keeps the frame timestamp; the second field (else branch,
 * its if/else lines lost to extraction) is shifted by half the duration. */
295 GST_BUFFER_TIMESTAMP (out) = GST_BUFFER_TIMESTAMP (buffer);
297 GST_BUFFER_TIMESTAMP (out) = GST_BUFFER_TIMESTAMP (buffer) +
298 GST_BUFFER_DURATION (buffer) / 2;
301 gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
302 outdata = gst_rtp_buffer_get_payload (&rtp);
304 GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
/* RFC 4175 payload header layout (original comment/diagram): */
309 * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
310 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
311 * | Extended Sequence Number | Length |
312 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
313 * |F| Line No |C| Offset |
314 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
315 * | Length |F| Line No |
316 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
318 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ .
320 * . Two (partial) lines of video data .
322 * +---------------------------------------------------------------+
325 /* need 2 bytes for the extended sequence number */
330 /* the headers start here */
333 /* while we can fit at least one header and one pixel */
/* ---- pass 1: emit 6-byte line headers while header+data still fit ---- */
334 while (left > (6 + pgroup)) {
335 /* we need a 6 bytes header */
338 /* get how may bytes we need for the remaining pixels */
339 pixels = width - offset;
340 length = (pixels * pgroup) / rtpvrawpay->xinc;
342 if (left >= length) {
343 /* pixels and header fit completely, we will write them and skip to the
347 /* line does not fit completely, see how many pixels fit */
/* Truncate to whole pixel groups so a partial line stays xinc-aligned. */
348 pixels = (left / pgroup) * rtpvrawpay->xinc;
349 length = (pixels * pgroup) / rtpvrawpay->xinc;
352 GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length,
/* Length: 16-bit big-endian byte count for this (partial) line. */
357 *outdata++ = (length >> 8) & 0xff;
358 *outdata++ = length & 0xff;
/* Line number: 15 bits, with the field bit (F) in the top bit. */
361 *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80);
362 *outdata++ = line & 0xff;
365 /* go to next line we do this here to make the check below easier */
366 line += rtpvrawpay->yinc;
369 /* calculate continuation marker */
/* C bit set when another header follows in this same packet. */
370 cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00;
372 /* write offset and continuation marker */
373 *outdata++ = ((offset >> 8) & 0x7f) | cont;
374 *outdata++ = offset & 0xff;
379 GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line);
382 GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset);
388 GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes",
389 (guint) (outdata - headers));
391 /* second pass, read headers and write the data */
/* ---- pass 2: re-read each header just written and copy pixel data ---- */
395 /* read length and cont */
396 length = (headers[0] << 8) | headers[1];
397 lin = ((headers[2] & 0x7f) << 8) | headers[3];
398 offs = ((headers[4] & 0x7f) << 8) | headers[5];
399 cont = headers[4] & 0x80;
400 pixels = length / pgroup;
403 GST_LOG_OBJECT (payload,
404 "writing length %u, line %u, offset %u, cont %d", length, lin, offs,
/* Per-format copy: packed formats are a straight memcpy; AYUV/I420/Y41B
 * are repacked into the RFC 4175 component order on the fly. */
407 switch (GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo)) {
408 case GST_VIDEO_FORMAT_RGB:
409 case GST_VIDEO_FORMAT_RGBA:
410 case GST_VIDEO_FORMAT_BGR:
411 case GST_VIDEO_FORMAT_BGRA:
412 case GST_VIDEO_FORMAT_UYVY:
413 case GST_VIDEO_FORMAT_UYVP:
/* Packed single-plane: offset is in pixels, convert via xinc then copy. */
414 offs /= rtpvrawpay->xinc;
415 memcpy (outdata, yp + (lin * ystride) + (offs * pgroup), length);
418 case GST_VIDEO_FORMAT_AYUV:
/* AYUV memory order is A,Y,U,V; emit U,Y,V per pixel (alpha dropped). */
423 datap = yp + (lin * ystride) + (offs * 4);
425 for (i = 0; i < pixels; i++) {
426 *outdata++ = datap[2];
427 *outdata++ = datap[1];
428 *outdata++ = datap[3];
433 case GST_VIDEO_FORMAT_I420:
/* Planar 4:2:0: interleave two luma rows with the shared chroma row. */
437 guint8 *yd1p, *yd2p, *udp, *vdp;
439 yd1p = yp + (lin * ystride) + (offs);
440 yd2p = yd1p + ystride;
442 (lin / rtpvrawpay->yinc * uvstride) + (offs / rtpvrawpay->xinc);
446 for (i = 0; i < pixels; i++) {
447 *outdata++ = *yd1p++;
448 *outdata++ = *yd1p++;
449 *outdata++ = *yd2p++;
450 *outdata++ = *yd2p++;
456 case GST_VIDEO_FORMAT_Y41B:
/* Planar 4:1:1: one chroma sample per xinc luma samples. */
460 guint8 *ydp, *udp, *vdp;
462 ydp = yp + (lin * ystride) + offs;
464 (lin / rtpvrawpay->yinc * uvstride) + (offs / rtpvrawpay->xinc);
468 for (i = 0; i < pixels; i++) {
/* Unhandled format: release the packet before erroring out. */
479 gst_rtp_buffer_unmap (&rtp);
480 gst_buffer_unref (out);
481 goto unknown_sampling;
/* Marker bit flags the last packet of the field/frame. */
488 if (line >= height) {
489 GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
490 gst_rtp_buffer_set_marker (&rtp, TRUE);
492 gst_rtp_buffer_unmap (&rtp);
494 GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
/* Trim the unused tail of the preallocated MTU-sized payload. */
495 gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
499 ret = gst_rtp_base_payload_push (payload, out);
/* Normal exit: unmap the frame and drop the input buffer reference. */
504 gst_video_frame_unmap (&frame);
505 gst_buffer_unref (buffer);
/* unknown_sampling error path (label line lost to extraction). */
512 GST_ELEMENT_ERROR (payload, STREAM, FORMAT,
513 (NULL), ("unimplemented sampling"));
514 gst_video_frame_unmap (&frame);
515 gst_buffer_unref (buffer);
516 return GST_FLOW_NOT_SUPPORTED;
/* Plugin entry point: register the rtpvrawpay element with SECONDARY rank.
 * NOTE(review): the return-type line and braces are missing from this
 * extracted chunk. */
521 gst_rtp_vraw_pay_plugin_init (GstPlugin * plugin)
523 return gst_element_register (plugin, "rtpvrawpay",
524 GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_PAY);